
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: test_rpc_service.proto
package org.apache.hadoop.ipc.protobuf;
public final class TestRpcServiceProtos {
private TestRpcServiceProtos() {}
public static void registerAllExtensions(
com.google.protobuf.ExtensionRegistry registry) {
}
/**
* Protobuf service {@code hadoop.common.TestProtobufRpcProto}
*
* <pre>
* A protobuf service for use in tests
* </pre>
*/
public static abstract class TestProtobufRpcProto
implements com.google.protobuf.Service {
protected TestProtobufRpcProto() {}
public interface Interface {
/**
* rpc ping(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);
*/
public abstract void ping(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
com.google.protobuf.RpcCallback done);
/**
* rpc echo(.hadoop.common.EchoRequestProto) returns (.hadoop.common.EchoResponseProto);
*/
public abstract void echo(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto request,
com.google.protobuf.RpcCallback done);
/**
* rpc error(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);
*/
public abstract void error(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
com.google.protobuf.RpcCallback done);
/**
* rpc error2(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);
*/
public abstract void error2(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
com.google.protobuf.RpcCallback done);
/**
* rpc slowPing(.hadoop.common.SlowPingRequestProto) returns (.hadoop.common.EmptyResponseProto);
*/
public abstract void slowPing(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto request,
com.google.protobuf.RpcCallback done);
/**
* rpc echo2(.hadoop.common.EchoRequestProto2) returns (.hadoop.common.EchoResponseProto2);
*/
public abstract void echo2(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2 request,
com.google.protobuf.RpcCallback done);
/**
* rpc add(.hadoop.common.AddRequestProto) returns (.hadoop.common.AddResponseProto);
*/
public abstract void add(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto request,
com.google.protobuf.RpcCallback done);
/**
* rpc add2(.hadoop.common.AddRequestProto2) returns (.hadoop.common.AddResponseProto);
*/
public abstract void add2(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2 request,
com.google.protobuf.RpcCallback done);
/**
* rpc testServerGet(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);
*/
public abstract void testServerGet(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
com.google.protobuf.RpcCallback done);
/**
* rpc exchange(.hadoop.common.ExchangeRequestProto) returns (.hadoop.common.ExchangeResponseProto);
*/
public abstract void exchange(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto request,
com.google.protobuf.RpcCallback done);
/**
* rpc sleep(.hadoop.common.SleepRequestProto) returns (.hadoop.common.EmptyResponseProto);
*/
public abstract void sleep(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto request,
com.google.protobuf.RpcCallback done);
/**
* rpc getAuthMethod(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.AuthMethodResponseProto);
*/
public abstract void getAuthMethod(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
com.google.protobuf.RpcCallback done);
/**
* rpc getAuthUser(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.AuthUserResponseProto);
*/
public abstract void getAuthUser(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
com.google.protobuf.RpcCallback done);
/**
* rpc echoPostponed(.hadoop.common.EchoRequestProto) returns (.hadoop.common.EchoResponseProto);
*/
public abstract void echoPostponed(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto request,
com.google.protobuf.RpcCallback done);
/**
* rpc sendPostponed(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);
*/
public abstract void sendPostponed(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
com.google.protobuf.RpcCallback done);
}
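// Illustrative sketch, not produced by protoc: a test can implement Interface and
// expose it through newReflectiveService(). The PingOnlyImpl name and the use of
// default instances as responses are assumptions for the example only; real tests
// supply their own implementations of every Interface method.
//
//   class PingOnlyImpl implements TestProtobufRpcProto.Interface {
//     @java.lang.Override
//     public void ping(com.google.protobuf.RpcController controller,
//         org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
//         com.google.protobuf.RpcCallback done) {
//       // Complete the call by handing a response to the callback.
//       done.run(org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance());
//     }
//     // ... the remaining Interface methods are implemented the same way ...
//   }
//
//   com.google.protobuf.Service service =
//       TestProtobufRpcProto.newReflectiveService(new PingOnlyImpl());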
public static com.google.protobuf.Service newReflectiveService(
final Interface impl) {
return new TestProtobufRpcProto() {
@java.lang.Override
public void ping(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
com.google.protobuf.RpcCallback done) {
impl.ping(controller, request, done);
}
@java.lang.Override
public void echo(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto request,
com.google.protobuf.RpcCallback done) {
impl.echo(controller, request, done);
}
@java.lang.Override
public void error(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
com.google.protobuf.RpcCallback done) {
impl.error(controller, request, done);
}
@java.lang.Override
public void error2(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
com.google.protobuf.RpcCallback done) {
impl.error2(controller, request, done);
}
@java.lang.Override
public void slowPing(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto request,
com.google.protobuf.RpcCallback done) {
impl.slowPing(controller, request, done);
}
@java.lang.Override
public void echo2(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2 request,
com.google.protobuf.RpcCallback done) {
impl.echo2(controller, request, done);
}
@java.lang.Override
public void add(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto request,
com.google.protobuf.RpcCallback done) {
impl.add(controller, request, done);
}
@java.lang.Override
public void add2(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2 request,
com.google.protobuf.RpcCallback done) {
impl.add2(controller, request, done);
}
@java.lang.Override
public void testServerGet(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
com.google.protobuf.RpcCallback done) {
impl.testServerGet(controller, request, done);
}
@java.lang.Override
public void exchange(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto request,
com.google.protobuf.RpcCallback done) {
impl.exchange(controller, request, done);
}
@java.lang.Override
public void sleep(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto request,
com.google.protobuf.RpcCallback done) {
impl.sleep(controller, request, done);
}
@java.lang.Override
public void getAuthMethod(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
com.google.protobuf.RpcCallback done) {
impl.getAuthMethod(controller, request, done);
}
@java.lang.Override
public void getAuthUser(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
com.google.protobuf.RpcCallback done) {
impl.getAuthUser(controller, request, done);
}
@java.lang.Override
public void echoPostponed(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto request,
com.google.protobuf.RpcCallback done) {
impl.echoPostponed(controller, request, done);
}
@java.lang.Override
public void sendPostponed(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
com.google.protobuf.RpcCallback done) {
impl.sendPostponed(controller, request, done);
}
};
}
public static com.google.protobuf.BlockingService
newReflectiveBlockingService(final BlockingInterface impl) {
return new com.google.protobuf.BlockingService() {
public final com.google.protobuf.Descriptors.ServiceDescriptor
getDescriptorForType() {
return getDescriptor();
}
public final com.google.protobuf.Message callBlockingMethod(
com.google.protobuf.Descriptors.MethodDescriptor method,
com.google.protobuf.RpcController controller,
com.google.protobuf.Message request)
throws com.google.protobuf.ServiceException {
if (method.getService() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"Service.callBlockingMethod() given method descriptor for " +
"wrong service type.");
}
switch(method.getIndex()) {
case 0:
return impl.ping(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)request);
case 1:
return impl.echo(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto)request);
case 2:
return impl.error(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)request);
case 3:
return impl.error2(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)request);
case 4:
return impl.slowPing(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto)request);
case 5:
return impl.echo2(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2)request);
case 6:
return impl.add(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto)request);
case 7:
return impl.add2(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2)request);
case 8:
return impl.testServerGet(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)request);
case 9:
return impl.exchange(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto)request);
case 10:
return impl.sleep(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto)request);
case 11:
return impl.getAuthMethod(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)request);
case 12:
return impl.getAuthUser(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)request);
case 13:
return impl.echoPostponed(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto)request);
case 14:
return impl.sendPostponed(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)request);
default:
throw new java.lang.AssertionError("Can't get here.");
}
}
public final com.google.protobuf.Message
getRequestPrototype(
com.google.protobuf.Descriptors.MethodDescriptor method) {
if (method.getService() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"Service.getRequestPrototype() given method " +
"descriptor for wrong service type.");
}
switch(method.getIndex()) {
case 0:
return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance();
case 1:
return org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto.getDefaultInstance();
case 2:
return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance();
case 3:
return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance();
case 4:
return org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto.getDefaultInstance();
case 5:
return org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2.getDefaultInstance();
case 6:
return org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto.getDefaultInstance();
case 7:
return org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2.getDefaultInstance();
case 8:
return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance();
case 9:
return org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto.getDefaultInstance();
case 10:
return org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto.getDefaultInstance();
case 11:
return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance();
case 12:
return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance();
case 13:
return org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto.getDefaultInstance();
case 14:
return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
}
}
public final com.google.protobuf.Message
getResponsePrototype(
com.google.protobuf.Descriptors.MethodDescriptor method) {
if (method.getService() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"Service.getResponsePrototype() given method " +
"descriptor for wrong service type.");
}
switch(method.getIndex()) {
case 0:
return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance();
case 1:
return org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.getDefaultInstance();
case 2:
return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance();
case 3:
return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance();
case 4:
return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance();
case 5:
return org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2.getDefaultInstance();
case 6:
return org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto.getDefaultInstance();
case 7:
return org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto.getDefaultInstance();
case 8:
return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance();
case 9:
return org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto.getDefaultInstance();
case 10:
return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance();
case 11:
return org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto.getDefaultInstance();
case 12:
return org.apache.hadoop.ipc.protobuf.TestProtos.AuthUserResponseProto.getDefaultInstance();
case 13:
return org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.getDefaultInstance();
case 14:
return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
}
}
};
}
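// The BlockingService built above dispatches purely on the method's position in the
// service descriptor: a server resolves the MethodDescriptor, parses the payload
// against getRequestPrototype(method), and then calls callBlockingMethod(). A minimal
// sketch of that flow; `service`, `methodName`, `controller` and `payload` are assumed
// to be supplied by the surrounding RPC server code, and exception handling is omitted.
//
//   com.google.protobuf.Descriptors.MethodDescriptor method =
//       service.getDescriptorForType().findMethodByName(methodName);
//   com.google.protobuf.Message request =
//       service.getRequestPrototype(method).newBuilderForType()
//           .mergeFrom(payload).build();
//   com.google.protobuf.Message response =
//       service.callBlockingMethod(method, controller, request);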
/**
* rpc ping(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);
*/
public abstract void ping(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
com.google.protobuf.RpcCallback done);
/**
* rpc echo(.hadoop.common.EchoRequestProto) returns (.hadoop.common.EchoResponseProto);
*/
public abstract void echo(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto request,
com.google.protobuf.RpcCallback done);
/**
* rpc error(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);
*/
public abstract void error(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
com.google.protobuf.RpcCallback done);
/**
* rpc error2(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);
*/
public abstract void error2(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
com.google.protobuf.RpcCallback done);
/**
* rpc slowPing(.hadoop.common.SlowPingRequestProto) returns (.hadoop.common.EmptyResponseProto);
*/
public abstract void slowPing(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto request,
com.google.protobuf.RpcCallback done);
/**
* rpc echo2(.hadoop.common.EchoRequestProto2) returns (.hadoop.common.EchoResponseProto2);
*/
public abstract void echo2(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2 request,
com.google.protobuf.RpcCallback done);
/**
* rpc add(.hadoop.common.AddRequestProto) returns (.hadoop.common.AddResponseProto);
*/
public abstract void add(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto request,
com.google.protobuf.RpcCallback done);
/**
* rpc add2(.hadoop.common.AddRequestProto2) returns (.hadoop.common.AddResponseProto);
*/
public abstract void add2(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2 request,
com.google.protobuf.RpcCallback done);
/**
* rpc testServerGet(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);
*/
public abstract void testServerGet(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
com.google.protobuf.RpcCallback done);
/**
* rpc exchange(.hadoop.common.ExchangeRequestProto) returns (.hadoop.common.ExchangeResponseProto);
*/
public abstract void exchange(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto request,
com.google.protobuf.RpcCallback done);
/**
* rpc sleep(.hadoop.common.SleepRequestProto) returns (.hadoop.common.EmptyResponseProto);
*/
public abstract void sleep(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto request,
com.google.protobuf.RpcCallback done);
/**
* rpc getAuthMethod(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.AuthMethodResponseProto);
*/
public abstract void getAuthMethod(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
com.google.protobuf.RpcCallback done);
/**
* rpc getAuthUser(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.AuthUserResponseProto);
*/
public abstract void getAuthUser(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
com.google.protobuf.RpcCallback done);
/**
* rpc echoPostponed(.hadoop.common.EchoRequestProto) returns (.hadoop.common.EchoResponseProto);
*/
public abstract void echoPostponed(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto request,
com.google.protobuf.RpcCallback done);
/**
* rpc sendPostponed(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);
*/
public abstract void sendPostponed(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
com.google.protobuf.RpcCallback done);
public static final
com.google.protobuf.Descriptors.ServiceDescriptor
getDescriptor() {
return org.apache.hadoop.ipc.protobuf.TestRpcServiceProtos.getDescriptor().getServices().get(0);
}
public final com.google.protobuf.Descriptors.ServiceDescriptor
getDescriptorForType() {
return getDescriptor();
}
public final void callMethod(
com.google.protobuf.Descriptors.MethodDescriptor method,
com.google.protobuf.RpcController controller,
com.google.protobuf.Message request,
com.google.protobuf.RpcCallback<
com.google.protobuf.Message> done) {
if (method.getService() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"Service.callMethod() given method descriptor for wrong " +
"service type.");
}
switch(method.getIndex()) {
case 0:
this.ping(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)request,
com.google.protobuf.RpcUtil.specializeCallback(
done));
return;
case 1:
this.echo(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto)request,
com.google.protobuf.RpcUtil.specializeCallback(
done));
return;
case 2:
this.error(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)request,
com.google.protobuf.RpcUtil.specializeCallback(
done));
return;
case 3:
this.error2(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)request,
com.google.protobuf.RpcUtil.specializeCallback(
done));
return;
case 4:
this.slowPing(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto)request,
com.google.protobuf.RpcUtil.specializeCallback(
done));
return;
case 5:
this.echo2(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2)request,
com.google.protobuf.RpcUtil.specializeCallback(
done));
return;
case 6:
this.add(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto)request,
com.google.protobuf.RpcUtil.specializeCallback(
done));
return;
case 7:
this.add2(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2)request,
com.google.protobuf.RpcUtil.specializeCallback(
done));
return;
case 8:
this.testServerGet(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)request,
com.google.protobuf.RpcUtil.specializeCallback(
done));
return;
case 9:
this.exchange(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto)request,
com.google.protobuf.RpcUtil.specializeCallback(
done));
return;
case 10:
this.sleep(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto)request,
com.google.protobuf.RpcUtil.specializeCallback(
done));
return;
case 11:
this.getAuthMethod(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)request,
com.google.protobuf.RpcUtil.specializeCallback(
done));
return;
case 12:
this.getAuthUser(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)request,
com.google.protobuf.RpcUtil.specializeCallback(
done));
return;
case 13:
this.echoPostponed(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto)request,
com.google.protobuf.RpcUtil.specializeCallback(
done));
return;
case 14:
this.sendPostponed(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)request,
com.google.protobuf.RpcUtil.specializeCallback(
done));
return;
default:
throw new java.lang.AssertionError("Can't get here.");
}
}
public final com.google.protobuf.Message
getRequestPrototype(
com.google.protobuf.Descriptors.MethodDescriptor method) {
if (method.getService() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"Service.getRequestPrototype() given method " +
"descriptor for wrong service type.");
}
switch(method.getIndex()) {
case 0:
return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance();
case 1:
return org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto.getDefaultInstance();
case 2:
return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance();
case 3:
return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance();
case 4:
return org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto.getDefaultInstance();
case 5:
return org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2.getDefaultInstance();
case 6:
return org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto.getDefaultInstance();
case 7:
return org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2.getDefaultInstance();
case 8:
return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance();
case 9:
return org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto.getDefaultInstance();
case 10:
return org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto.getDefaultInstance();
case 11:
return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance();
case 12:
return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance();
case 13:
return org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto.getDefaultInstance();
case 14:
return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
}
}
public final com.google.protobuf.Message
getResponsePrototype(
com.google.protobuf.Descriptors.MethodDescriptor method) {
if (method.getService() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"Service.getResponsePrototype() given method " +
"descriptor for wrong service type.");
}
switch(method.getIndex()) {
case 0:
return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance();
case 1:
return org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.getDefaultInstance();
case 2:
return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance();
case 3:
return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance();
case 4:
return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance();
case 5:
return org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2.getDefaultInstance();
case 6:
return org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto.getDefaultInstance();
case 7:
return org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto.getDefaultInstance();
case 8:
return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance();
case 9:
return org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto.getDefaultInstance();
case 10:
return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance();
case 11:
return org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto.getDefaultInstance();
case 12:
return org.apache.hadoop.ipc.protobuf.TestProtos.AuthUserResponseProto.getDefaultInstance();
case 13:
return org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.getDefaultInstance();
case 14:
return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
}
}
public static Stub newStub(
com.google.protobuf.RpcChannel channel) {
return new Stub(channel);
}
public static final class Stub extends org.apache.hadoop.ipc.protobuf.TestRpcServiceProtos.TestProtobufRpcProto implements Interface {
private Stub(com.google.protobuf.RpcChannel channel) {
this.channel = channel;
}
private final com.google.protobuf.RpcChannel channel;
public com.google.protobuf.RpcChannel getChannel() {
return channel;
}
public void ping(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
com.google.protobuf.RpcCallback done) {
channel.callMethod(
getDescriptor().getMethods().get(0),
controller,
request,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance(),
com.google.protobuf.RpcUtil.generalizeCallback(
done,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.class,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance()));
}
public void echo(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto request,
com.google.protobuf.RpcCallback done) {
channel.callMethod(
getDescriptor().getMethods().get(1),
controller,
request,
org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.getDefaultInstance(),
com.google.protobuf.RpcUtil.generalizeCallback(
done,
org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.class,
org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.getDefaultInstance()));
}
public void error(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
com.google.protobuf.RpcCallback done) {
channel.callMethod(
getDescriptor().getMethods().get(2),
controller,
request,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance(),
com.google.protobuf.RpcUtil.generalizeCallback(
done,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.class,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance()));
}
public void error2(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
com.google.protobuf.RpcCallback done) {
channel.callMethod(
getDescriptor().getMethods().get(3),
controller,
request,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance(),
com.google.protobuf.RpcUtil.generalizeCallback(
done,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.class,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance()));
}
public void slowPing(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto request,
com.google.protobuf.RpcCallback done) {
channel.callMethod(
getDescriptor().getMethods().get(4),
controller,
request,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance(),
com.google.protobuf.RpcUtil.generalizeCallback(
done,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.class,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance()));
}
public void echo2(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2 request,
com.google.protobuf.RpcCallback done) {
channel.callMethod(
getDescriptor().getMethods().get(5),
controller,
request,
org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2.getDefaultInstance(),
com.google.protobuf.RpcUtil.generalizeCallback(
done,
org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2.class,
org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2.getDefaultInstance()));
}
public void add(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto request,
com.google.protobuf.RpcCallback done) {
channel.callMethod(
getDescriptor().getMethods().get(6),
controller,
request,
org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto.getDefaultInstance(),
com.google.protobuf.RpcUtil.generalizeCallback(
done,
org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto.class,
org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto.getDefaultInstance()));
}
public void add2(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2 request,
com.google.protobuf.RpcCallback done) {
channel.callMethod(
getDescriptor().getMethods().get(7),
controller,
request,
org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto.getDefaultInstance(),
com.google.protobuf.RpcUtil.generalizeCallback(
done,
org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto.class,
org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto.getDefaultInstance()));
}
public void testServerGet(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
com.google.protobuf.RpcCallback done) {
channel.callMethod(
getDescriptor().getMethods().get(8),
controller,
request,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance(),
com.google.protobuf.RpcUtil.generalizeCallback(
done,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.class,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance()));
}
public void exchange(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto request,
com.google.protobuf.RpcCallback done) {
channel.callMethod(
getDescriptor().getMethods().get(9),
controller,
request,
org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto.getDefaultInstance(),
com.google.protobuf.RpcUtil.generalizeCallback(
done,
org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto.class,
org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto.getDefaultInstance()));
}
public void sleep(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto request,
com.google.protobuf.RpcCallback done) {
channel.callMethod(
getDescriptor().getMethods().get(10),
controller,
request,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance(),
com.google.protobuf.RpcUtil.generalizeCallback(
done,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.class,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance()));
}
public void getAuthMethod(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
com.google.protobuf.RpcCallback done) {
channel.callMethod(
getDescriptor().getMethods().get(11),
controller,
request,
org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto.getDefaultInstance(),
com.google.protobuf.RpcUtil.generalizeCallback(
done,
org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto.class,
org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto.getDefaultInstance()));
}
public void getAuthUser(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
com.google.protobuf.RpcCallback done) {
channel.callMethod(
getDescriptor().getMethods().get(12),
controller,
request,
org.apache.hadoop.ipc.protobuf.TestProtos.AuthUserResponseProto.getDefaultInstance(),
com.google.protobuf.RpcUtil.generalizeCallback(
done,
org.apache.hadoop.ipc.protobuf.TestProtos.AuthUserResponseProto.class,
org.apache.hadoop.ipc.protobuf.TestProtos.AuthUserResponseProto.getDefaultInstance()));
}
public void echoPostponed(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto request,
com.google.protobuf.RpcCallback done) {
channel.callMethod(
getDescriptor().getMethods().get(13),
controller,
request,
org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.getDefaultInstance(),
com.google.protobuf.RpcUtil.generalizeCallback(
done,
org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.class,
org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.getDefaultInstance()));
}
public void sendPostponed(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
com.google.protobuf.RpcCallback done) {
channel.callMethod(
getDescriptor().getMethods().get(14),
controller,
request,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance(),
com.google.protobuf.RpcUtil.generalizeCallback(
done,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.class,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance()));
}
}
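// Illustrative sketch, not produced by protoc: an asynchronous call through the Stub.
// The RpcChannel and RpcController (`channel`, `controller`) are assumed to come from
// the surrounding RPC machinery; how they are obtained is outside this file.
//
//   TestProtobufRpcProto.Stub stub = TestProtobufRpcProto.newStub(channel);
//   stub.ping(controller,
//       org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance(),
//       new com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto>() {
//         @java.lang.Override
//         public void run(org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto response) {
//           // handle the ping response here
//         }
//       });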
public static BlockingInterface newBlockingStub(
com.google.protobuf.BlockingRpcChannel channel) {
return new BlockingStub(channel);
}
public interface BlockingInterface {
public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto ping(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request)
throws com.google.protobuf.ServiceException;
public org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto echo(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto request)
throws com.google.protobuf.ServiceException;
public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto error(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request)
throws com.google.protobuf.ServiceException;
public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto error2(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request)
throws com.google.protobuf.ServiceException;
public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto slowPing(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto request)
throws com.google.protobuf.ServiceException;
public org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2 echo2(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2 request)
throws com.google.protobuf.ServiceException;
public org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto add(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto request)
throws com.google.protobuf.ServiceException;
public org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto add2(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2 request)
throws com.google.protobuf.ServiceException;
public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto testServerGet(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request)
throws com.google.protobuf.ServiceException;
public org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto exchange(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto request)
throws com.google.protobuf.ServiceException;
public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto sleep(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto request)
throws com.google.protobuf.ServiceException;
public org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto getAuthMethod(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request)
throws com.google.protobuf.ServiceException;
public org.apache.hadoop.ipc.protobuf.TestProtos.AuthUserResponseProto getAuthUser(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request)
throws com.google.protobuf.ServiceException;
public org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto echoPostponed(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto request)
throws com.google.protobuf.ServiceException;
public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto sendPostponed(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request)
throws com.google.protobuf.ServiceException;
}
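// Illustrative sketch, not produced by protoc: the blocking flavour of the same call.
// A com.google.protobuf.BlockingRpcChannel (`channel`) and an RpcController
// (`controller`) are assumed to be provided by the surrounding RPC machinery.
//
//   TestProtobufRpcProto.BlockingInterface client =
//       TestProtobufRpcProto.newBlockingStub(channel);
//   org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto reply =
//       client.ping(controller,
//           org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance());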
private static final class BlockingStub implements BlockingInterface {
private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) {
this.channel = channel;
}
private final com.google.protobuf.BlockingRpcChannel channel;
public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto ping(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request)
throws com.google.protobuf.ServiceException {
return (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto) channel.callBlockingMethod(
getDescriptor().getMethods().get(0),
controller,
request,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance());
}
public org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto echo(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto request)
throws com.google.protobuf.ServiceException {
return (org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto) channel.callBlockingMethod(
getDescriptor().getMethods().get(1),
controller,
request,
org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.getDefaultInstance());
}
public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto error(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request)
throws com.google.protobuf.ServiceException {
return (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto) channel.callBlockingMethod(
getDescriptor().getMethods().get(2),
controller,
request,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance());
}
public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto error2(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request)
throws com.google.protobuf.ServiceException {
return (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto) channel.callBlockingMethod(
getDescriptor().getMethods().get(3),
controller,
request,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance());
}
public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto slowPing(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto request)
throws com.google.protobuf.ServiceException {
return (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto) channel.callBlockingMethod(
getDescriptor().getMethods().get(4),
controller,
request,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance());
}
public org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2 echo2(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2 request)
throws com.google.protobuf.ServiceException {
return (org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2) channel.callBlockingMethod(
getDescriptor().getMethods().get(5),
controller,
request,
org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2.getDefaultInstance());
}
public org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto add(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto request)
throws com.google.protobuf.ServiceException {
return (org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto) channel.callBlockingMethod(
getDescriptor().getMethods().get(6),
controller,
request,
org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto.getDefaultInstance());
}
public org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto add2(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2 request)
throws com.google.protobuf.ServiceException {
return (org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto) channel.callBlockingMethod(
getDescriptor().getMethods().get(7),
controller,
request,
org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto.getDefaultInstance());
}
public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto testServerGet(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request)
throws com.google.protobuf.ServiceException {
return (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto) channel.callBlockingMethod(
getDescriptor().getMethods().get(8),
controller,
request,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance());
}
public org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto exchange(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto request)
throws com.google.protobuf.ServiceException {
return (org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto) channel.callBlockingMethod(
getDescriptor().getMethods().get(9),
controller,
request,
org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto.getDefaultInstance());
}
public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto sleep(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto request)
throws com.google.protobuf.ServiceException {
return (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto) channel.callBlockingMethod(
getDescriptor().getMethods().get(10),
controller,
request,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance());
}
public org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto getAuthMethod(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request)
throws com.google.protobuf.ServiceException {
return (org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto) channel.callBlockingMethod(
getDescriptor().getMethods().get(11),
controller,
request,
org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto.getDefaultInstance());
}
public org.apache.hadoop.ipc.protobuf.TestProtos.AuthUserResponseProto getAuthUser(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request)
throws com.google.protobuf.ServiceException {
return (org.apache.hadoop.ipc.protobuf.TestProtos.AuthUserResponseProto) channel.callBlockingMethod(
getDescriptor().getMethods().get(12),
controller,
request,
org.apache.hadoop.ipc.protobuf.TestProtos.AuthUserResponseProto.getDefaultInstance());
}
public org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto echoPostponed(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto request)
throws com.google.protobuf.ServiceException {
return (org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto) channel.callBlockingMethod(
getDescriptor().getMethods().get(13),
controller,
request,
org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.getDefaultInstance());
}
public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto sendPostponed(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request)
throws com.google.protobuf.ServiceException {
return (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto) channel.callBlockingMethod(
getDescriptor().getMethods().get(14),
controller,
request,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance());
}
}
// @@protoc_insertion_point(class_scope:hadoop.common.TestProtobufRpcProto)
}
/**
* Protobuf service {@code hadoop.common.TestProtobufRpc2Proto}
*/
public static abstract class TestProtobufRpc2Proto
implements com.google.protobuf.Service {
protected TestProtobufRpc2Proto() {}
public interface Interface {
/**
* rpc ping2(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);
*/
public abstract void ping2(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
com.google.protobuf.RpcCallback done);
/**
* rpc echo2(.hadoop.common.EchoRequestProto) returns (.hadoop.common.EchoResponseProto);
*/
public abstract void echo2(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto request,
com.google.protobuf.RpcCallback done);
/**
* rpc sleep(.hadoop.common.SleepRequestProto) returns (.hadoop.common.SleepResponseProto);
*/
public abstract void sleep(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto request,
com.google.protobuf.RpcCallback done);
}
public static com.google.protobuf.Service newReflectiveService(
final Interface impl) {
return new TestProtobufRpc2Proto() {
@java.lang.Override
public void ping2(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
com.google.protobuf.RpcCallback done) {
impl.ping2(controller, request, done);
}
@java.lang.Override
public void echo2(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto request,
com.google.protobuf.RpcCallback done) {
impl.echo2(controller, request, done);
}
@java.lang.Override
public void sleep(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto request,
com.google.protobuf.RpcCallback done) {
impl.sleep(controller, request, done);
}
};
}
public static com.google.protobuf.BlockingService
newReflectiveBlockingService(final BlockingInterface impl) {
return new com.google.protobuf.BlockingService() {
public final com.google.protobuf.Descriptors.ServiceDescriptor
getDescriptorForType() {
return getDescriptor();
}
public final com.google.protobuf.Message callBlockingMethod(
com.google.protobuf.Descriptors.MethodDescriptor method,
com.google.protobuf.RpcController controller,
com.google.protobuf.Message request)
throws com.google.protobuf.ServiceException {
if (method.getService() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"Service.callBlockingMethod() given method descriptor for " +
"wrong service type.");
}
switch(method.getIndex()) {
case 0:
return impl.ping2(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)request);
case 1:
return impl.echo2(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto)request);
case 2:
return impl.sleep(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto)request);
default:
throw new java.lang.AssertionError("Can't get here.");
}
}
public final com.google.protobuf.Message
getRequestPrototype(
com.google.protobuf.Descriptors.MethodDescriptor method) {
if (method.getService() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"Service.getRequestPrototype() given method " +
"descriptor for wrong service type.");
}
switch(method.getIndex()) {
case 0:
return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance();
case 1:
return org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto.getDefaultInstance();
case 2:
return org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
}
}
public final com.google.protobuf.Message
getResponsePrototype(
com.google.protobuf.Descriptors.MethodDescriptor method) {
if (method.getService() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"Service.getResponsePrototype() given method " +
"descriptor for wrong service type.");
}
switch(method.getIndex()) {
case 0:
return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance();
case 1:
return org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.getDefaultInstance();
case 2:
return org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
}
}
};
}
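// Illustrative sketch, not produced by protoc: with only three methods, a complete
// BlockingInterface implementation for this service fits in a few lines. Returning
// default instances is a placeholder chosen for the example, not what the real
// tests do.
//
//   class Rpc2Impl implements TestProtobufRpc2Proto.BlockingInterface {
//     public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto ping2(
//         com.google.protobuf.RpcController controller,
//         org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request) {
//       return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance();
//     }
//     public org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto echo2(
//         com.google.protobuf.RpcController controller,
//         org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto request) {
//       return org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.getDefaultInstance();
//     }
//     public org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto sleep(
//         com.google.protobuf.RpcController controller,
//         org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto request) {
//       return org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto.getDefaultInstance();
//     }
//   }
//   com.google.protobuf.BlockingService blocking =
//       TestProtobufRpc2Proto.newReflectiveBlockingService(new Rpc2Impl());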
/**
* rpc ping2(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);
*/
public abstract void ping2(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
com.google.protobuf.RpcCallback done);
/**
* rpc echo2(.hadoop.common.EchoRequestProto) returns (.hadoop.common.EchoResponseProto);
*/
public abstract void echo2(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto request,
com.google.protobuf.RpcCallback done);
/**
* rpc sleep(.hadoop.common.SleepRequestProto) returns (.hadoop.common.SleepResponseProto);
*/
public abstract void sleep(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto request,
com.google.protobuf.RpcCallback done);
public static final
com.google.protobuf.Descriptors.ServiceDescriptor
getDescriptor() {
return org.apache.hadoop.ipc.protobuf.TestRpcServiceProtos.getDescriptor().getServices().get(1);
}
public final com.google.protobuf.Descriptors.ServiceDescriptor
getDescriptorForType() {
return getDescriptor();
}
public final void callMethod(
com.google.protobuf.Descriptors.MethodDescriptor method,
com.google.protobuf.RpcController controller,
com.google.protobuf.Message request,
com.google.protobuf.RpcCallback<
com.google.protobuf.Message> done) {
if (method.getService() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"Service.callMethod() given method descriptor for wrong " +
"service type.");
}
switch(method.getIndex()) {
case 0:
this.ping2(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)request,
com.google.protobuf.RpcUtil.specializeCallback(
done));
return;
case 1:
this.echo2(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto)request,
com.google.protobuf.RpcUtil.specializeCallback(
done));
return;
case 2:
this.sleep(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto)request,
com.google.protobuf.RpcUtil.specializeCallback(
done));
return;
default:
throw new java.lang.AssertionError("Can't get here.");
}
}
public final com.google.protobuf.Message
getRequestPrototype(
com.google.protobuf.Descriptors.MethodDescriptor method) {
if (method.getService() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"Service.getRequestPrototype() given method " +
"descriptor for wrong service type.");
}
switch(method.getIndex()) {
case 0:
return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance();
case 1:
return org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto.getDefaultInstance();
case 2:
return org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
}
}
public final com.google.protobuf.Message
getResponsePrototype(
com.google.protobuf.Descriptors.MethodDescriptor method) {
if (method.getService() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"Service.getResponsePrototype() given method " +
"descriptor for wrong service type.");
}
switch(method.getIndex()) {
case 0:
return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance();
case 1:
return org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.getDefaultInstance();
case 2:
return org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
}
}
public static Stub newStub(
com.google.protobuf.RpcChannel channel) {
return new Stub(channel);
}
public static final class Stub extends org.apache.hadoop.ipc.protobuf.TestRpcServiceProtos.TestProtobufRpc2Proto implements Interface {
private Stub(com.google.protobuf.RpcChannel channel) {
this.channel = channel;
}
private final com.google.protobuf.RpcChannel channel;
public com.google.protobuf.RpcChannel getChannel() {
return channel;
}
public void ping2(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
com.google.protobuf.RpcCallback done) {
channel.callMethod(
getDescriptor().getMethods().get(0),
controller,
request,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance(),
com.google.protobuf.RpcUtil.generalizeCallback(
done,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.class,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance()));
}
public void echo2(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto request,
com.google.protobuf.RpcCallback done) {
channel.callMethod(
getDescriptor().getMethods().get(1),
controller,
request,
org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.getDefaultInstance(),
com.google.protobuf.RpcUtil.generalizeCallback(
done,
org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.class,
org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.getDefaultInstance()));
}
public void sleep(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto request,
com.google.protobuf.RpcCallback done) {
channel.callMethod(
getDescriptor().getMethods().get(2),
controller,
request,
org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto.getDefaultInstance(),
com.google.protobuf.RpcUtil.generalizeCallback(
done,
org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto.class,
org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto.getDefaultInstance()));
}
}
public static BlockingInterface newBlockingStub(
com.google.protobuf.BlockingRpcChannel channel) {
return new BlockingStub(channel);
}
public interface BlockingInterface {
public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto ping2(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request)
throws com.google.protobuf.ServiceException;
public org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto echo2(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto request)
throws com.google.protobuf.ServiceException;
public org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto sleep(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto request)
throws com.google.protobuf.ServiceException;
}
private static final class BlockingStub implements BlockingInterface {
private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) {
this.channel = channel;
}
private final com.google.protobuf.BlockingRpcChannel channel;
public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto ping2(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request)
throws com.google.protobuf.ServiceException {
return (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto) channel.callBlockingMethod(
getDescriptor().getMethods().get(0),
controller,
request,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance());
}
public org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto echo2(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto request)
throws com.google.protobuf.ServiceException {
return (org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto) channel.callBlockingMethod(
getDescriptor().getMethods().get(1),
controller,
request,
org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.getDefaultInstance());
}
public org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto sleep(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto request)
throws com.google.protobuf.ServiceException {
return (org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto) channel.callBlockingMethod(
getDescriptor().getMethods().get(2),
controller,
request,
org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto.getDefaultInstance());
}
}
// @@protoc_insertion_point(class_scope:hadoop.common.TestProtobufRpc2Proto)
}
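// --- Illustrative usage sketch (hand-written, not produced by protoc) ---
// A blocking client for TestProtobufRpc2Proto is normally obtained by wrapping a
// com.google.protobuf.BlockingRpcChannel with newBlockingStub(); the channel itself
// (named "channel" below) is an assumption supplied by the surrounding RPC setup,
// and exception handling for ServiceException is elided.
//
//   com.google.protobuf.BlockingRpcChannel channel = ...; // assumed to exist
//   TestProtobufRpc2Proto.BlockingInterface client =
//       TestProtobufRpc2Proto.newBlockingStub(channel);
//   org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto reply =
//       client.ping2(null,
//           org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance());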
/**
* Protobuf service {@code hadoop.common.OldProtobufRpcProto}
*/
public static abstract class OldProtobufRpcProto
implements com.google.protobuf.Service {
protected OldProtobufRpcProto() {}
public interface Interface {
/**
* rpc ping(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);
*/
public abstract void ping(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done);
/**
* rpc echo(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);
*/
public abstract void echo(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done);
}
public static com.google.protobuf.Service newReflectiveService(
final Interface impl) {
return new OldProtobufRpcProto() {
@java.lang.Override
public void ping(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done) {
impl.ping(controller, request, done);
}
@java.lang.Override
public void echo(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done) {
impl.echo(controller, request, done);
}
};
}
public static com.google.protobuf.BlockingService
newReflectiveBlockingService(final BlockingInterface impl) {
return new com.google.protobuf.BlockingService() {
public final com.google.protobuf.Descriptors.ServiceDescriptor
getDescriptorForType() {
return getDescriptor();
}
public final com.google.protobuf.Message callBlockingMethod(
com.google.protobuf.Descriptors.MethodDescriptor method,
com.google.protobuf.RpcController controller,
com.google.protobuf.Message request)
throws com.google.protobuf.ServiceException {
if (method.getService() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"Service.callBlockingMethod() given method descriptor for " +
"wrong service type.");
}
switch(method.getIndex()) {
case 0:
return impl.ping(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)request);
case 1:
return impl.echo(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)request);
default:
throw new java.lang.AssertionError("Can't get here.");
}
}
public final com.google.protobuf.Message
getRequestPrototype(
com.google.protobuf.Descriptors.MethodDescriptor method) {
if (method.getService() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"Service.getRequestPrototype() given method " +
"descriptor for wrong service type.");
}
switch(method.getIndex()) {
case 0:
return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance();
case 1:
return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
}
}
public final com.google.protobuf.Message
getResponsePrototype(
com.google.protobuf.Descriptors.MethodDescriptor method) {
if (method.getService() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"Service.getResponsePrototype() given method " +
"descriptor for wrong service type.");
}
switch(method.getIndex()) {
case 0:
return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance();
case 1:
return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
}
}
};
}
/**
* rpc ping(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);
*/
public abstract void ping(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done);
/**
* rpc echo(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);
*/
public abstract void echo(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done);
public static final
com.google.protobuf.Descriptors.ServiceDescriptor
getDescriptor() {
return org.apache.hadoop.ipc.protobuf.TestRpcServiceProtos.getDescriptor().getServices().get(2);
}
public final com.google.protobuf.Descriptors.ServiceDescriptor
getDescriptorForType() {
return getDescriptor();
}
public final void callMethod(
com.google.protobuf.Descriptors.MethodDescriptor method,
com.google.protobuf.RpcController controller,
com.google.protobuf.Message request,
com.google.protobuf.RpcCallback<
com.google.protobuf.Message> done) {
if (method.getService() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"Service.callMethod() given method descriptor for wrong " +
"service type.");
}
switch(method.getIndex()) {
case 0:
this.ping(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)request,
com.google.protobuf.RpcUtil.specializeCallback(
done));
return;
case 1:
this.echo(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)request,
com.google.protobuf.RpcUtil.specializeCallback(
done));
return;
default:
throw new java.lang.AssertionError("Can't get here.");
}
}
public final com.google.protobuf.Message
getRequestPrototype(
com.google.protobuf.Descriptors.MethodDescriptor method) {
if (method.getService() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"Service.getRequestPrototype() given method " +
"descriptor for wrong service type.");
}
switch(method.getIndex()) {
case 0:
return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance();
case 1:
return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
}
}
public final com.google.protobuf.Message
getResponsePrototype(
com.google.protobuf.Descriptors.MethodDescriptor method) {
if (method.getService() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"Service.getResponsePrototype() given method " +
"descriptor for wrong service type.");
}
switch(method.getIndex()) {
case 0:
return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance();
case 1:
return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
}
}
public static Stub newStub(
com.google.protobuf.RpcChannel channel) {
return new Stub(channel);
}
public static final class Stub extends org.apache.hadoop.ipc.protobuf.TestRpcServiceProtos.OldProtobufRpcProto implements Interface {
private Stub(com.google.protobuf.RpcChannel channel) {
this.channel = channel;
}
private final com.google.protobuf.RpcChannel channel;
public com.google.protobuf.RpcChannel getChannel() {
return channel;
}
public void ping(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done) {
channel.callMethod(
getDescriptor().getMethods().get(0),
controller,
request,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance(),
com.google.protobuf.RpcUtil.generalizeCallback(
done,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.class,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance()));
}
public void echo(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done) {
channel.callMethod(
getDescriptor().getMethods().get(1),
controller,
request,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance(),
com.google.protobuf.RpcUtil.generalizeCallback(
done,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.class,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance()));
}
}
public static BlockingInterface newBlockingStub(
com.google.protobuf.BlockingRpcChannel channel) {
return new BlockingStub(channel);
}
public interface BlockingInterface {
public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto ping(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request)
throws com.google.protobuf.ServiceException;
public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto echo(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request)
throws com.google.protobuf.ServiceException;
}
private static final class BlockingStub implements BlockingInterface {
private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) {
this.channel = channel;
}
private final com.google.protobuf.BlockingRpcChannel channel;
public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto ping(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request)
throws com.google.protobuf.ServiceException {
return (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto) channel.callBlockingMethod(
getDescriptor().getMethods().get(0),
controller,
request,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance());
}
public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto echo(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request)
throws com.google.protobuf.ServiceException {
return (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto) channel.callBlockingMethod(
getDescriptor().getMethods().get(1),
controller,
request,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance());
}
}
// @@protoc_insertion_point(class_scope:hadoop.common.OldProtobufRpcProto)
}
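// --- Illustrative server-side sketch (hand-written, not produced by protoc) ---
// To expose OldProtobufRpcProto, a test would implement BlockingInterface and wrap it
// with newReflectiveBlockingService(); the switch on method.getIndex() above then
// performs the dispatch. Registering the resulting BlockingService with an actual RPC
// server is elided here.
//
//   OldProtobufRpcProto.BlockingInterface impl =
//       new OldProtobufRpcProto.BlockingInterface() {
//         public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto ping(
//             com.google.protobuf.RpcController c,
//             org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto req) {
//           return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance();
//         }
//         public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto echo(
//             com.google.protobuf.RpcController c,
//             org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto req) {
//           return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance();
//         }
//       };
//   com.google.protobuf.BlockingService service =
//       OldProtobufRpcProto.newReflectiveBlockingService(impl);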
/**
* Protobuf service {@code hadoop.common.NewProtobufRpcProto}
*/
public static abstract class NewProtobufRpcProto
implements com.google.protobuf.Service {
protected NewProtobufRpcProto() {}
public interface Interface {
/**
* rpc ping(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);
*/
public abstract void ping(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done);
/**
* rpc echo(.hadoop.common.OptRequestProto) returns (.hadoop.common.OptResponseProto);
*/
public abstract void echo(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto request,
com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto> done);
}
public static com.google.protobuf.Service newReflectiveService(
final Interface impl) {
return new NewProtobufRpcProto() {
@java.lang.Override
public void ping(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done) {
impl.ping(controller, request, done);
}
@java.lang.Override
public void echo(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto request,
com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto> done) {
impl.echo(controller, request, done);
}
};
}
public static com.google.protobuf.BlockingService
newReflectiveBlockingService(final BlockingInterface impl) {
return new com.google.protobuf.BlockingService() {
public final com.google.protobuf.Descriptors.ServiceDescriptor
getDescriptorForType() {
return getDescriptor();
}
public final com.google.protobuf.Message callBlockingMethod(
com.google.protobuf.Descriptors.MethodDescriptor method,
com.google.protobuf.RpcController controller,
com.google.protobuf.Message request)
throws com.google.protobuf.ServiceException {
if (method.getService() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"Service.callBlockingMethod() given method descriptor for " +
"wrong service type.");
}
switch(method.getIndex()) {
case 0:
return impl.ping(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)request);
case 1:
return impl.echo(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto)request);
default:
throw new java.lang.AssertionError("Can't get here.");
}
}
public final com.google.protobuf.Message
getRequestPrototype(
com.google.protobuf.Descriptors.MethodDescriptor method) {
if (method.getService() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"Service.getRequestPrototype() given method " +
"descriptor for wrong service type.");
}
switch(method.getIndex()) {
case 0:
return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance();
case 1:
return org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
}
}
public final com.google.protobuf.Message
getResponsePrototype(
com.google.protobuf.Descriptors.MethodDescriptor method) {
if (method.getService() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"Service.getResponsePrototype() given method " +
"descriptor for wrong service type.");
}
switch(method.getIndex()) {
case 0:
return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance();
case 1:
return org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
}
}
};
}
/**
* rpc ping(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);
*/
public abstract void ping(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done);
/**
* rpc echo(.hadoop.common.OptRequestProto) returns (.hadoop.common.OptResponseProto);
*/
public abstract void echo(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto request,
com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto> done);
public static final
com.google.protobuf.Descriptors.ServiceDescriptor
getDescriptor() {
return org.apache.hadoop.ipc.protobuf.TestRpcServiceProtos.getDescriptor().getServices().get(3);
}
public final com.google.protobuf.Descriptors.ServiceDescriptor
getDescriptorForType() {
return getDescriptor();
}
public final void callMethod(
com.google.protobuf.Descriptors.MethodDescriptor method,
com.google.protobuf.RpcController controller,
com.google.protobuf.Message request,
com.google.protobuf.RpcCallback<
com.google.protobuf.Message> done) {
if (method.getService() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"Service.callMethod() given method descriptor for wrong " +
"service type.");
}
switch(method.getIndex()) {
case 0:
this.ping(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)request,
com.google.protobuf.RpcUtil.specializeCallback(
done));
return;
case 1:
this.echo(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto)request,
com.google.protobuf.RpcUtil.specializeCallback(
done));
return;
default:
throw new java.lang.AssertionError("Can't get here.");
}
}
public final com.google.protobuf.Message
getRequestPrototype(
com.google.protobuf.Descriptors.MethodDescriptor method) {
if (method.getService() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"Service.getRequestPrototype() given method " +
"descriptor for wrong service type.");
}
switch(method.getIndex()) {
case 0:
return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance();
case 1:
return org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
}
}
public final com.google.protobuf.Message
getResponsePrototype(
com.google.protobuf.Descriptors.MethodDescriptor method) {
if (method.getService() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"Service.getResponsePrototype() given method " +
"descriptor for wrong service type.");
}
switch(method.getIndex()) {
case 0:
return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance();
case 1:
return org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
}
}
public static Stub newStub(
com.google.protobuf.RpcChannel channel) {
return new Stub(channel);
}
public static final class Stub extends org.apache.hadoop.ipc.protobuf.TestRpcServiceProtos.NewProtobufRpcProto implements Interface {
private Stub(com.google.protobuf.RpcChannel channel) {
this.channel = channel;
}
private final com.google.protobuf.RpcChannel channel;
public com.google.protobuf.RpcChannel getChannel() {
return channel;
}
public void ping(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done) {
channel.callMethod(
getDescriptor().getMethods().get(0),
controller,
request,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance(),
com.google.protobuf.RpcUtil.generalizeCallback(
done,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.class,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance()));
}
public void echo(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto request,
com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto> done) {
channel.callMethod(
getDescriptor().getMethods().get(1),
controller,
request,
org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto.getDefaultInstance(),
com.google.protobuf.RpcUtil.generalizeCallback(
done,
org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto.class,
org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto.getDefaultInstance()));
}
}
public static BlockingInterface newBlockingStub(
com.google.protobuf.BlockingRpcChannel channel) {
return new BlockingStub(channel);
}
public interface BlockingInterface {
public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto ping(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request)
throws com.google.protobuf.ServiceException;
public org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto echo(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto request)
throws com.google.protobuf.ServiceException;
}
private static final class BlockingStub implements BlockingInterface {
private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) {
this.channel = channel;
}
private final com.google.protobuf.BlockingRpcChannel channel;
public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto ping(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request)
throws com.google.protobuf.ServiceException {
return (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto) channel.callBlockingMethod(
getDescriptor().getMethods().get(0),
controller,
request,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance());
}
public org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto echo(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto request)
throws com.google.protobuf.ServiceException {
return (org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto) channel.callBlockingMethod(
getDescriptor().getMethods().get(1),
controller,
request,
org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto.getDefaultInstance());
}
}
// @@protoc_insertion_point(class_scope:hadoop.common.NewProtobufRpcProto)
}
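// --- Illustrative asynchronous sketch (hand-written, not produced by protoc) ---
// The non-blocking Stub returned by newStub() routes every call through
// RpcChannel.callMethod() and hands the decoded response to the caller's RpcCallback.
// The channel below is again an assumption coming from the test's RPC setup, and a
// null controller is used only to keep the sketch short.
//
//   com.google.protobuf.RpcChannel channel = ...; // assumed to exist
//   NewProtobufRpcProto.Stub stub = NewProtobufRpcProto.newStub(channel);
//   stub.ping(null,
//       org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance(),
//       new com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto>() {
//         public void run(org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto response) {
//           // response is null when the call fails; consult the RpcController in real code
//         }
//       });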
/**
* Protobuf service {@code hadoop.common.NewerProtobufRpcProto}
*/
public static abstract class NewerProtobufRpcProto
implements com.google.protobuf.Service {
protected NewerProtobufRpcProto() {}
public interface Interface {
/**
* rpc ping(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);
*/
public abstract void ping(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done);
/**
* rpc echo(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);
*/
public abstract void echo(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done);
}
public static com.google.protobuf.Service newReflectiveService(
final Interface impl) {
return new NewerProtobufRpcProto() {
@java.lang.Override
public void ping(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done) {
impl.ping(controller, request, done);
}
@java.lang.Override
public void echo(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done) {
impl.echo(controller, request, done);
}
};
}
public static com.google.protobuf.BlockingService
newReflectiveBlockingService(final BlockingInterface impl) {
return new com.google.protobuf.BlockingService() {
public final com.google.protobuf.Descriptors.ServiceDescriptor
getDescriptorForType() {
return getDescriptor();
}
public final com.google.protobuf.Message callBlockingMethod(
com.google.protobuf.Descriptors.MethodDescriptor method,
com.google.protobuf.RpcController controller,
com.google.protobuf.Message request)
throws com.google.protobuf.ServiceException {
if (method.getService() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"Service.callBlockingMethod() given method descriptor for " +
"wrong service type.");
}
switch(method.getIndex()) {
case 0:
return impl.ping(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)request);
case 1:
return impl.echo(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)request);
default:
throw new java.lang.AssertionError("Can't get here.");
}
}
public final com.google.protobuf.Message
getRequestPrototype(
com.google.protobuf.Descriptors.MethodDescriptor method) {
if (method.getService() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"Service.getRequestPrototype() given method " +
"descriptor for wrong service type.");
}
switch(method.getIndex()) {
case 0:
return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance();
case 1:
return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
}
}
public final com.google.protobuf.Message
getResponsePrototype(
com.google.protobuf.Descriptors.MethodDescriptor method) {
if (method.getService() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"Service.getResponsePrototype() given method " +
"descriptor for wrong service type.");
}
switch(method.getIndex()) {
case 0:
return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance();
case 1:
return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
}
}
};
}
/**
* rpc ping(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);
*/
public abstract void ping(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done);
/**
* rpc echo(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);
*/
public abstract void echo(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done);
public static final
com.google.protobuf.Descriptors.ServiceDescriptor
getDescriptor() {
return org.apache.hadoop.ipc.protobuf.TestRpcServiceProtos.getDescriptor().getServices().get(4);
}
public final com.google.protobuf.Descriptors.ServiceDescriptor
getDescriptorForType() {
return getDescriptor();
}
public final void callMethod(
com.google.protobuf.Descriptors.MethodDescriptor method,
com.google.protobuf.RpcController controller,
com.google.protobuf.Message request,
com.google.protobuf.RpcCallback<
com.google.protobuf.Message> done) {
if (method.getService() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"Service.callMethod() given method descriptor for wrong " +
"service type.");
}
switch(method.getIndex()) {
case 0:
this.ping(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)request,
com.google.protobuf.RpcUtil.specializeCallback(
done));
return;
case 1:
this.echo(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)request,
com.google.protobuf.RpcUtil.specializeCallback(
done));
return;
default:
throw new java.lang.AssertionError("Can't get here.");
}
}
public final com.google.protobuf.Message
getRequestPrototype(
com.google.protobuf.Descriptors.MethodDescriptor method) {
if (method.getService() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"Service.getRequestPrototype() given method " +
"descriptor for wrong service type.");
}
switch(method.getIndex()) {
case 0:
return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance();
case 1:
return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
}
}
public final com.google.protobuf.Message
getResponsePrototype(
com.google.protobuf.Descriptors.MethodDescriptor method) {
if (method.getService() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"Service.getResponsePrototype() given method " +
"descriptor for wrong service type.");
}
switch(method.getIndex()) {
case 0:
return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance();
case 1:
return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
}
}
public static Stub newStub(
com.google.protobuf.RpcChannel channel) {
return new Stub(channel);
}
public static final class Stub extends org.apache.hadoop.ipc.protobuf.TestRpcServiceProtos.NewerProtobufRpcProto implements Interface {
private Stub(com.google.protobuf.RpcChannel channel) {
this.channel = channel;
}
private final com.google.protobuf.RpcChannel channel;
public com.google.protobuf.RpcChannel getChannel() {
return channel;
}
public void ping(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done) {
channel.callMethod(
getDescriptor().getMethods().get(0),
controller,
request,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance(),
com.google.protobuf.RpcUtil.generalizeCallback(
done,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.class,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance()));
}
public void echo(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done) {
channel.callMethod(
getDescriptor().getMethods().get(1),
controller,
request,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance(),
com.google.protobuf.RpcUtil.generalizeCallback(
done,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.class,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance()));
}
}
public static BlockingInterface newBlockingStub(
com.google.protobuf.BlockingRpcChannel channel) {
return new BlockingStub(channel);
}
public interface BlockingInterface {
public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto ping(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request)
throws com.google.protobuf.ServiceException;
public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto echo(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request)
throws com.google.protobuf.ServiceException;
}
private static final class BlockingStub implements BlockingInterface {
private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) {
this.channel = channel;
}
private final com.google.protobuf.BlockingRpcChannel channel;
public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto ping(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request)
throws com.google.protobuf.ServiceException {
return (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto) channel.callBlockingMethod(
getDescriptor().getMethods().get(0),
controller,
request,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance());
}
public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto echo(
com.google.protobuf.RpcController controller,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request)
throws com.google.protobuf.ServiceException {
return (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto) channel.callBlockingMethod(
getDescriptor().getMethods().get(1),
controller,
request,
org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance());
}
}
// @@protoc_insertion_point(class_scope:hadoop.common.NewerProtobufRpcProto)
}
public static com.google.protobuf.Descriptors.FileDescriptor
getDescriptor() {
return descriptor;
}
private static com.google.protobuf.Descriptors.FileDescriptor
descriptor;
static {
java.lang.String[] descriptorData = {
"\n\026test_rpc_service.proto\022\rhadoop.common\032" +
"\ntest.proto2\326\t\n\024TestProtobufRpcProto\022K\n\004" +
"ping\022 .hadoop.common.EmptyRequestProto\032!" +
".hadoop.common.EmptyResponseProto\022I\n\004ech" +
"o\022\037.hadoop.common.EchoRequestProto\032 .had" +
"oop.common.EchoResponseProto\022L\n\005error\022 ." +
"hadoop.common.EmptyRequestProto\032!.hadoop" +
".common.EmptyResponseProto\022M\n\006error2\022 .h" +
"adoop.common.EmptyRequestProto\032!.hadoop." +
"common.EmptyResponseProto\022R\n\010slowPing\022#.",
"hadoop.common.SlowPingRequestProto\032!.had" +
"oop.common.EmptyResponseProto\022L\n\005echo2\022 " +
".hadoop.common.EchoRequestProto2\032!.hadoo" +
"p.common.EchoResponseProto2\022F\n\003add\022\036.had" +
"oop.common.AddRequestProto\032\037.hadoop.comm" +
"on.AddResponseProto\022H\n\004add2\022\037.hadoop.com" +
"mon.AddRequestProto2\032\037.hadoop.common.Add" +
"ResponseProto\022T\n\rtestServerGet\022 .hadoop." +
"common.EmptyRequestProto\032!.hadoop.common" +
".EmptyResponseProto\022U\n\010exchange\022#.hadoop",
".common.ExchangeRequestProto\032$.hadoop.co" +
"mmon.ExchangeResponseProto\022L\n\005sleep\022 .ha" +
"doop.common.SleepRequestProto\032!.hadoop.c" +
"ommon.EmptyResponseProto\022Y\n\rgetAuthMetho" +
"d\022 .hadoop.common.EmptyRequestProto\032&.ha" +
"doop.common.AuthMethodResponseProto\022U\n\013g" +
"etAuthUser\022 .hadoop.common.EmptyRequestP" +
"roto\032$.hadoop.common.AuthUserResponsePro" +
"to\022R\n\rechoPostponed\022\037.hadoop.common.Echo" +
"RequestProto\032 .hadoop.common.EchoRespons",
"eProto\022T\n\rsendPostponed\022 .hadoop.common." +
"EmptyRequestProto\032!.hadoop.common.EmptyR" +
"esponseProto2\377\001\n\025TestProtobufRpc2Proto\022L" +
"\n\005ping2\022 .hadoop.common.EmptyRequestProt" +
"o\032!.hadoop.common.EmptyResponseProto\022J\n\005" +
"echo2\022\037.hadoop.common.EchoRequestProto\032 " +
".hadoop.common.EchoResponseProto\022L\n\005slee" +
"p\022 .hadoop.common.SleepRequestProto\032!.ha" +
"doop.common.SleepResponseProto2\257\001\n\023OldPr" +
"otobufRpcProto\022K\n\004ping\022 .hadoop.common.E",
"mptyRequestProto\032!.hadoop.common.EmptyRe" +
"sponseProto\022K\n\004echo\022 .hadoop.common.Empt" +
"yRequestProto\032!.hadoop.common.EmptyRespo" +
"nseProto2\253\001\n\023NewProtobufRpcProto\022K\n\004ping" +
"\022 .hadoop.common.EmptyRequestProto\032!.had" +
"oop.common.EmptyResponseProto\022G\n\004echo\022\036." +
"hadoop.common.OptRequestProto\032\037.hadoop.c" +
"ommon.OptResponseProto2\261\001\n\025NewerProtobuf" +
"RpcProto\022K\n\004ping\022 .hadoop.common.EmptyRe" +
"questProto\032!.hadoop.common.EmptyResponse",
"Proto\022K\n\004echo\022 .hadoop.common.EmptyReque" +
"stProto\032!.hadoop.common.EmptyResponsePro" +
"toB<\n\036org.apache.hadoop.ipc.protobufB\024Te" +
"stRpcServiceProtos\210\001\001\240\001\001"
};
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
public com.google.protobuf.ExtensionRegistry assignDescriptors(
com.google.protobuf.Descriptors.FileDescriptor root) {
descriptor = root;
return null;
}
};
com.google.protobuf.Descriptors.FileDescriptor
.internalBuildGeneratedFileFrom(descriptorData,
new com.google.protobuf.Descriptors.FileDescriptor[] {
org.apache.hadoop.ipc.protobuf.TestProtos.getDescriptor(),
}, assigner);
}
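// Note (hand-written): the static block above rebuilds the FileDescriptor for
// test_rpc_service.proto from its serialized form, and each nested service class
// resolves its ServiceDescriptor by position in getServices(): OldProtobufRpcProto
// at index 2, NewProtobufRpcProto at 3, and NewerProtobufRpcProto at 4, matching the
// declaration order in the .proto file. For example:
//
//   com.google.protobuf.Descriptors.ServiceDescriptor sd =
//       TestRpcServiceProtos.getDescriptor().getServices().get(2);
//   // sd.getFullName() is expected to be "hadoop.common.OldProtobufRpcProto"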
// @@protoc_insertion_point(outer_class_scope)
}