// org.apache.hadoop.yarn.proto.ClientSCMProtocol
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: client_SCM_protocol.proto
package org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto;
public final class ClientSCMProtocol {
private ClientSCMProtocol() {}
public static void registerAllExtensions(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite registry) {
}
public static void registerAllExtensions(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistry registry) {
registerAllExtensions(
(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite) registry);
}
/**
* Protobuf service {@code hadoop.yarn.ClientSCMProtocolService}
*/
public static abstract class ClientSCMProtocolService
implements org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Service {
protected ClientSCMProtocolService() {}
public interface Interface {
/**
* rpc use(.hadoop.yarn.UseSharedCacheResourceRequestProto) returns (.hadoop.yarn.UseSharedCacheResourceResponseProto);
*/
public abstract void use(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcController controller,
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto request,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcCallback done);
/**
* rpc release(.hadoop.yarn.ReleaseSharedCacheResourceRequestProto) returns (.hadoop.yarn.ReleaseSharedCacheResourceResponseProto);
*/
public abstract void release(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcController controller,
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto request,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcCallback done);
}
public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Service newReflectiveService(
final Interface impl) {
return new ClientSCMProtocolService() {
@java.lang.Override
public void use(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcController controller,
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto request,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcCallback done) {
impl.use(controller, request, done);
}
@java.lang.Override
public void release(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcController controller,
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto request,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcCallback done) {
impl.release(controller, request, done);
}
};
}
public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.BlockingService
newReflectiveBlockingService(final BlockingInterface impl) {
return new org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.BlockingService() {
public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.ServiceDescriptor
getDescriptorForType() {
return getDescriptor();
}
public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message callBlockingMethod(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcController controller,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message request)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ServiceException {
if (method.getService() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"Service.callBlockingMethod() given method descriptor for " +
"wrong service type.");
}
switch(method.getIndex()) {
case 0:
return impl.use(controller, (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto)request);
case 1:
return impl.release(controller, (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto)request);
default:
throw new java.lang.AssertionError("Can't get here.");
}
}
public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message
getRequestPrototype(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method) {
if (method.getService() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"Service.getRequestPrototype() given method " +
"descriptor for wrong service type.");
}
switch(method.getIndex()) {
case 0:
return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto.getDefaultInstance();
case 1:
return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
}
}
public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message
getResponsePrototype(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method) {
if (method.getService() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"Service.getResponsePrototype() given method " +
"descriptor for wrong service type.");
}
switch(method.getIndex()) {
case 0:
return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto.getDefaultInstance();
case 1:
return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
}
}
};
}
/**
* rpc use(.hadoop.yarn.UseSharedCacheResourceRequestProto) returns (.hadoop.yarn.UseSharedCacheResourceResponseProto);
*/
public abstract void use(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcController controller,
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto request,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcCallback done);
/**
* rpc release(.hadoop.yarn.ReleaseSharedCacheResourceRequestProto) returns (.hadoop.yarn.ReleaseSharedCacheResourceResponseProto);
*/
public abstract void release(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcController controller,
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto request,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcCallback done);
public static final
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.ServiceDescriptor
getDescriptor() {
return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.ClientSCMProtocol.getDescriptor().getServices().get(0);
}
public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.ServiceDescriptor
getDescriptorForType() {
return getDescriptor();
}
public final void callMethod(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcController controller,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message request,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcCallback<
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message> done) {
if (method.getService() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"Service.callMethod() given method descriptor for wrong " +
"service type.");
}
switch(method.getIndex()) {
case 0:
this.use(controller, (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto)request,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcUtil.specializeCallback(
done));
return;
case 1:
this.release(controller, (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto)request,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcUtil.specializeCallback(
done));
return;
default:
throw new java.lang.AssertionError("Can't get here.");
}
}
public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message
getRequestPrototype(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method) {
if (method.getService() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"Service.getRequestPrototype() given method " +
"descriptor for wrong service type.");
}
switch(method.getIndex()) {
case 0:
return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto.getDefaultInstance();
case 1:
return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
}
}
public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message
getResponsePrototype(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method) {
if (method.getService() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"Service.getResponsePrototype() given method " +
"descriptor for wrong service type.");
}
switch(method.getIndex()) {
case 0:
return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto.getDefaultInstance();
case 1:
return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
}
}
public static Stub newStub(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcChannel channel) {
return new Stub(channel);
}
public static final class Stub extends org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.ClientSCMProtocol.ClientSCMProtocolService implements Interface {
private Stub(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcChannel channel) {
this.channel = channel;
}
private final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcChannel channel;
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcChannel getChannel() {
return channel;
}
public void use(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcController controller,
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto request,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcCallback done) {
channel.callMethod(
getDescriptor().getMethods().get(0),
controller,
request,
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto.getDefaultInstance(),
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcUtil.generalizeCallback(
done,
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto.class,
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto.getDefaultInstance()));
}
public void release(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcController controller,
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto request,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcCallback done) {
channel.callMethod(
getDescriptor().getMethods().get(1),
controller,
request,
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto.getDefaultInstance(),
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcUtil.generalizeCallback(
done,
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto.class,
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto.getDefaultInstance()));
}
}
public static BlockingInterface newBlockingStub(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.BlockingRpcChannel channel) {
return new BlockingStub(channel);
}
public interface BlockingInterface {
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto use(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcController controller,
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto request)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ServiceException;
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto release(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcController controller,
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto request)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ServiceException;
}
private static final class BlockingStub implements BlockingInterface {
private BlockingStub(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.BlockingRpcChannel channel) {
this.channel = channel;
}
private final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.BlockingRpcChannel channel;
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto use(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcController controller,
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceRequestProto request)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ServiceException {
return (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto) channel.callBlockingMethod(
getDescriptor().getMethods().get(0),
controller,
request,
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServiceProtos.UseSharedCacheResourceResponseProto.getDefaultInstance());
}
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto release(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcController controller,
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceRequestProto request)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ServiceException {
return (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto) channel.callBlockingMethod(
getDescriptor().getMethods().get(1),
controller,
request,
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServiceProtos.ReleaseSharedCacheResourceResponseProto.getDefaultInstance());
}
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.ClientSCMProtocolService)
}
public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor
getDescriptor() {
return descriptor;
}
private static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor
descriptor;
static {
java.lang.String[] descriptorData = {
"\n\031client_SCM_protocol.proto\022\013hadoop.yarn" +
"\032\031yarn_service_protos.proto2\372\001\n\030ClientSC" +
"MProtocolService\022h\n\003use\022/.hadoop.yarn.Us" +
"eSharedCacheResourceRequestProto\0320.hadoo" +
"p.yarn.UseSharedCacheResourceResponsePro" +
"to\022t\n\007release\0223.hadoop.yarn.ReleaseShare" +
"dCacheResourceRequestProto\0324.hadoop.yarn" +
".ReleaseSharedCacheResourceResponseProto" +
"B7\n\034org.apache.hadoop.yarn.protoB\021Client" +
"SCMProtocol\210\001\001\240\001\001"
};
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor. InternalDescriptorAssigner() {
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistry assignDescriptors(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor root) {
descriptor = root;
return null;
}
};
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor
.internalBuildGeneratedFileFrom(descriptorData,
new org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor[] {
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServiceProtos.getDescriptor(),
}, assigner);
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.YarnServiceProtos.getDescriptor();
}
// @@protoc_insertion_point(outer_class_scope)
}
// © 2015 - 2025 Weber Informatics LLC | Privacy Policy