// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: applicationmaster_protocol.proto
package org.apache.hadoop.yarn.proto;
public final class ApplicationMasterProtocol {
private ApplicationMasterProtocol() {}
public static void registerAllExtensions(
com.google.protobuf.ExtensionRegistry registry) {
}
/**
* Protobuf service {@code hadoop.yarn.ApplicationMasterProtocolService}
*/
public static abstract class ApplicationMasterProtocolService
implements com.google.protobuf.Service {
protected ApplicationMasterProtocolService() {}
public interface Interface {
/**
* rpc registerApplicationMaster(.hadoop.yarn.RegisterApplicationMasterRequestProto) returns (.hadoop.yarn.RegisterApplicationMasterResponseProto);
*/
public abstract void registerApplicationMaster(
com.google.protobuf.RpcController controller,
org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto request,
com.google.protobuf.RpcCallback<org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto> done);
/**
* rpc finishApplicationMaster(.hadoop.yarn.FinishApplicationMasterRequestProto) returns (.hadoop.yarn.FinishApplicationMasterResponseProto);
*/
public abstract void finishApplicationMaster(
com.google.protobuf.RpcController controller,
org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto request,
com.google.protobuf.RpcCallback<org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto> done);
/**
* rpc allocate(.hadoop.yarn.AllocateRequestProto) returns (.hadoop.yarn.AllocateResponseProto);
*/
public abstract void allocate(
com.google.protobuf.RpcController controller,
org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto request,
com.google.protobuf.RpcCallback<org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto> done);
}
public static com.google.protobuf.Service newReflectiveService(
final Interface impl) {
return new ApplicationMasterProtocolService() {
@java.lang.Override
public void registerApplicationMaster(
com.google.protobuf.RpcController controller,
org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto request,
com.google.protobuf.RpcCallback<org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto> done) {
impl.registerApplicationMaster(controller, request, done);
}
@java.lang.Override
public void finishApplicationMaster(
com.google.protobuf.RpcController controller,
org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto request,
com.google.protobuf.RpcCallback<org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto> done) {
impl.finishApplicationMaster(controller, request, done);
}
@java.lang.Override
public void allocate(
com.google.protobuf.RpcController controller,
org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto request,
com.google.protobuf.RpcCallback<org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto> done) {
impl.allocate(controller, request, done);
}
};
}
public static com.google.protobuf.BlockingService
newReflectiveBlockingService(final BlockingInterface impl) {
return new com.google.protobuf.BlockingService() {
public final com.google.protobuf.Descriptors.ServiceDescriptor
getDescriptorForType() {
return getDescriptor();
}
public final com.google.protobuf.Message callBlockingMethod(
com.google.protobuf.Descriptors.MethodDescriptor method,
com.google.protobuf.RpcController controller,
com.google.protobuf.Message request)
throws com.google.protobuf.ServiceException {
if (method.getService() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"Service.callBlockingMethod() given method descriptor for " +
"wrong service type.");
}
switch(method.getIndex()) {
case 0:
return impl.registerApplicationMaster(controller, (org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto)request);
case 1:
return impl.finishApplicationMaster(controller, (org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto)request);
case 2:
return impl.allocate(controller, (org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto)request);
default:
throw new java.lang.AssertionError("Can't get here.");
}
}
public final com.google.protobuf.Message
getRequestPrototype(
com.google.protobuf.Descriptors.MethodDescriptor method) {
if (method.getService() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"Service.getRequestPrototype() given method " +
"descriptor for wrong service type.");
}
switch(method.getIndex()) {
case 0:
return org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto.getDefaultInstance();
case 1:
return org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto.getDefaultInstance();
case 2:
return org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
}
}
public final com.google.protobuf.Message
getResponsePrototype(
com.google.protobuf.Descriptors.MethodDescriptor method) {
if (method.getService() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"Service.getResponsePrototype() given method " +
"descriptor for wrong service type.");
}
switch(method.getIndex()) {
case 0:
return org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto.getDefaultInstance();
case 1:
return org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto.getDefaultInstance();
case 2:
return org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
}
}
};
}
/**
* rpc registerApplicationMaster(.hadoop.yarn.RegisterApplicationMasterRequestProto) returns (.hadoop.yarn.RegisterApplicationMasterResponseProto);
*/
public abstract void registerApplicationMaster(
com.google.protobuf.RpcController controller,
org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto request,
com.google.protobuf.RpcCallback<org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto> done);
/**
* rpc finishApplicationMaster(.hadoop.yarn.FinishApplicationMasterRequestProto) returns (.hadoop.yarn.FinishApplicationMasterResponseProto);
*/
public abstract void finishApplicationMaster(
com.google.protobuf.RpcController controller,
org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto request,
com.google.protobuf.RpcCallback<org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto> done);
/**
* rpc allocate(.hadoop.yarn.AllocateRequestProto) returns (.hadoop.yarn.AllocateResponseProto);
*/
public abstract void allocate(
com.google.protobuf.RpcController controller,
org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto request,
com.google.protobuf.RpcCallback<org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto> done);
public static final
com.google.protobuf.Descriptors.ServiceDescriptor
getDescriptor() {
return org.apache.hadoop.yarn.proto.ApplicationMasterProtocol.getDescriptor().getServices().get(0);
}
public final com.google.protobuf.Descriptors.ServiceDescriptor
getDescriptorForType() {
return getDescriptor();
}
public final void callMethod(
com.google.protobuf.Descriptors.MethodDescriptor method,
com.google.protobuf.RpcController controller,
com.google.protobuf.Message request,
com.google.protobuf.RpcCallback<
com.google.protobuf.Message> done) {
if (method.getService() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"Service.callMethod() given method descriptor for wrong " +
"service type.");
}
switch(method.getIndex()) {
case 0:
this.registerApplicationMaster(controller, (org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto)request,
com.google.protobuf.RpcUtil.specializeCallback(
done));
return;
case 1:
this.finishApplicationMaster(controller, (org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto)request,
com.google.protobuf.RpcUtil.specializeCallback(
done));
return;
case 2:
this.allocate(controller, (org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto)request,
com.google.protobuf.RpcUtil.specializeCallback(
done));
return;
default:
throw new java.lang.AssertionError("Can't get here.");
}
}
public final com.google.protobuf.Message
getRequestPrototype(
com.google.protobuf.Descriptors.MethodDescriptor method) {
if (method.getService() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"Service.getRequestPrototype() given method " +
"descriptor for wrong service type.");
}
switch(method.getIndex()) {
case 0:
return org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto.getDefaultInstance();
case 1:
return org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto.getDefaultInstance();
case 2:
return org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
}
}
public final com.google.protobuf.Message
getResponsePrototype(
com.google.protobuf.Descriptors.MethodDescriptor method) {
if (method.getService() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"Service.getResponsePrototype() given method " +
"descriptor for wrong service type.");
}
switch(method.getIndex()) {
case 0:
return org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto.getDefaultInstance();
case 1:
return org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto.getDefaultInstance();
case 2:
return org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
}
}
public static Stub newStub(
com.google.protobuf.RpcChannel channel) {
return new Stub(channel);
}
public static final class Stub extends org.apache.hadoop.yarn.proto.ApplicationMasterProtocol.ApplicationMasterProtocolService implements Interface {
private Stub(com.google.protobuf.RpcChannel channel) {
this.channel = channel;
}
private final com.google.protobuf.RpcChannel channel;
public com.google.protobuf.RpcChannel getChannel() {
return channel;
}
public void registerApplicationMaster(
com.google.protobuf.RpcController controller,
org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto request,
com.google.protobuf.RpcCallback done) {
channel.callMethod(
getDescriptor().getMethods().get(0),
controller,
request,
org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto.getDefaultInstance(),
com.google.protobuf.RpcUtil.generalizeCallback(
done,
org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto.class,
org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto.getDefaultInstance()));
}
public void finishApplicationMaster(
com.google.protobuf.RpcController controller,
org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto request,
com.google.protobuf.RpcCallback done) {
channel.callMethod(
getDescriptor().getMethods().get(1),
controller,
request,
org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto.getDefaultInstance(),
com.google.protobuf.RpcUtil.generalizeCallback(
done,
org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto.class,
org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto.getDefaultInstance()));
}
public void allocate(
com.google.protobuf.RpcController controller,
org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto request,
com.google.protobuf.RpcCallback done) {
channel.callMethod(
getDescriptor().getMethods().get(2),
controller,
request,
org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto.getDefaultInstance(),
com.google.protobuf.RpcUtil.generalizeCallback(
done,
org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto.class,
org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto.getDefaultInstance()));
}
}
public static BlockingInterface newBlockingStub(
com.google.protobuf.BlockingRpcChannel channel) {
return new BlockingStub(channel);
}
public interface BlockingInterface {
public org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto registerApplicationMaster(
com.google.protobuf.RpcController controller,
org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto request)
throws com.google.protobuf.ServiceException;
public org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto finishApplicationMaster(
com.google.protobuf.RpcController controller,
org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto request)
throws com.google.protobuf.ServiceException;
public org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto allocate(
com.google.protobuf.RpcController controller,
org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto request)
throws com.google.protobuf.ServiceException;
}
private static final class BlockingStub implements BlockingInterface {
private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) {
this.channel = channel;
}
private final com.google.protobuf.BlockingRpcChannel channel;
public org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto registerApplicationMaster(
com.google.protobuf.RpcController controller,
org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto request)
throws com.google.protobuf.ServiceException {
return (org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto) channel.callBlockingMethod(
getDescriptor().getMethods().get(0),
controller,
request,
org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto.getDefaultInstance());
}
public org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto finishApplicationMaster(
com.google.protobuf.RpcController controller,
org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto request)
throws com.google.protobuf.ServiceException {
return (org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto) channel.callBlockingMethod(
getDescriptor().getMethods().get(1),
controller,
request,
org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto.getDefaultInstance());
}
public org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto allocate(
com.google.protobuf.RpcController controller,
org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto request)
throws com.google.protobuf.ServiceException {
return (org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto) channel.callBlockingMethod(
getDescriptor().getMethods().get(2),
controller,
request,
org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto.getDefaultInstance());
}
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.ApplicationMasterProtocolService)
}
public static com.google.protobuf.Descriptors.FileDescriptor
getDescriptor() {
return descriptor;
}
private static com.google.protobuf.Descriptors.FileDescriptor
descriptor;
static {
java.lang.String[] descriptorData = {
"\n applicationmaster_protocol.proto\022\013hado" +
"op.yarn\032\031yarn_service_protos.proto2\374\002\n A" +
"pplicationMasterProtocolService\022\204\001\n\031regi" +
"sterApplicationMaster\0222.hadoop.yarn.Regi" +
"sterApplicationMasterRequestProto\0323.hado" +
"op.yarn.RegisterApplicationMasterRespons" +
"eProto\022~\n\027finishApplicationMaster\0220.hado" +
"op.yarn.FinishApplicationMasterRequestPr" +
"oto\0321.hadoop.yarn.FinishApplicationMaste" +
"rResponseProto\022Q\n\010allocate\022!.hadoop.yarn",
".AllocateRequestProto\032\".hadoop.yarn.Allo" +
"cateResponseProtoB?\n\034org.apache.hadoop.y" +
"arn.protoB\031ApplicationMasterProtocol\210\001\001\240" +
"\001\001"
};
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
public com.google.protobuf.ExtensionRegistry assignDescriptors(
com.google.protobuf.Descriptors.FileDescriptor root) {
descriptor = root;
return null;
}
};
com.google.protobuf.Descriptors.FileDescriptor
.internalBuildGeneratedFileFrom(descriptorData,
new com.google.protobuf.Descriptors.FileDescriptor[] {
org.apache.hadoop.yarn.proto.YarnServiceProtos.getDescriptor(),
}, assigner);
}
// @@protoc_insertion_point(outer_class_scope)
}
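// ---------------------------------------------------------------------------
// Usage sketch: the generated class above only declares the protobuf service;
// it opens no connection itself. The sketch below assumes a caller-supplied
// com.google.protobuf.BlockingRpcChannel (in YARN this is normally provided by
// Hadoop's RPC layer), and the class name, method name, and the null
// RpcController placeholders are assumptions for illustration only. It walks
// the typical ApplicationMaster lifecycle: register, allocate, finish. On the
// server side, an implementation of BlockingInterface can likewise be adapted
// for an RPC server with newReflectiveBlockingService(impl).
// ---------------------------------------------------------------------------
class ApplicationMasterProtocolUsageSketch {
  static void driveApplicationMaster(com.google.protobuf.BlockingRpcChannel channel)
      throws com.google.protobuf.ServiceException {
    // Bind the generated blocking stub to the caller-supplied channel.
    ApplicationMasterProtocol.ApplicationMasterProtocolService.BlockingInterface am =
        ApplicationMasterProtocol.ApplicationMasterProtocolService.newBlockingStub(channel);
    // 1. Register the ApplicationMaster. A real AM would populate host, RPC
    //    port and tracking URL on the request builder; the default instance is
    //    used here only to keep the sketch self-contained.
    org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto
        registration = am.registerApplicationMaster(
            null,
            org.apache.hadoop.yarn.proto.YarnServiceProtos
                .RegisterApplicationMasterRequestProto.getDefaultInstance());
    // 2. Heartbeat loop. A real AM fills the allocate request with resource
    //    asks and released containers, and reads newly allocated containers
    //    from the response.
    org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto allocation =
        am.allocate(
            null,
            org.apache.hadoop.yarn.proto.YarnServiceProtos
                .AllocateRequestProto.getDefaultInstance());
    // 3. Unregister once the application has finished.
    am.finishApplicationMaster(
        null,
        org.apache.hadoop.yarn.proto.YarnServiceProtos
            .FinishApplicationMasterRequestProto.getDefaultInstance());
  }
}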