// org.apache.hadoop.yarn.proto.CsiAdaptorProtocol — Maven / Gradle / Ivy (artifact-listing header from the hosting site; commented out so the file compiles)
// Generated by the protocol buffer compiler. DO NOT EDIT!
// (NOTE: the word "compiler" in the original banner was mangled by the shade-plugin's "com." package relocation.)
// source: YarnCsiAdaptor.proto
package org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto;
/**
 * Generated protobuf outer class for {@code YarnCsiAdaptor.proto}.
 *
 * <p>Holds the generated service definition {@code hadoop.yarn.CsiAdaptorProtocolService}
 * (async interface, reflective service adapter, blocking service adapter, client stubs)
 * plus the file descriptor parsed from the serialized proto in the static initializer.
 * All request/response message types live in {@code CsiAdaptorProtos}.
 *
 * <p>Machine-generated by protoc (then package-relocated by the Hadoop shade plugin).
 * Do not hand-edit the code; regenerate from the .proto instead.
 */
public final class CsiAdaptorProtocol {
// Non-instantiable static holder class.
private CsiAdaptorProtocol() {}
// YarnCsiAdaptor.proto defines no extensions, so registration is a no-op.
public static void registerAllExtensions(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite registry) {
}
// Full-registry overload simply delegates to the Lite overload above.
public static void registerAllExtensions(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistry registry) {
registerAllExtensions(
(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite) registry);
}
/**
 * Protobuf service {@code hadoop.yarn.CsiAdaptorProtocolService}
 */
public static abstract class CsiAdaptorProtocolService
implements org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Service {
protected CsiAdaptorProtocolService() {}
// Async, callback-style interface implemented by service providers.
// Each method mirrors one rpc in the .proto; results are delivered via `done`.
public interface Interface {
/**
 * rpc getPluginInfo(.hadoop.yarn.GetPluginInfoRequest) returns (.hadoop.yarn.GetPluginInfoResponse);
 */
public abstract void getPluginInfo(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcController controller,
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest request,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcCallback done);
/**
 * rpc validateVolumeCapacity(.hadoop.yarn.ValidateVolumeCapabilitiesRequest) returns (.hadoop.yarn.ValidateVolumeCapabilitiesResponse);
 */
public abstract void validateVolumeCapacity(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcController controller,
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest request,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcCallback done);
/**
 * rpc nodePublishVolume(.hadoop.yarn.NodePublishVolumeRequest) returns (.hadoop.yarn.NodePublishVolumeResponse);
 */
public abstract void nodePublishVolume(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcController controller,
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest request,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcCallback done);
/**
 * rpc nodeUnpublishVolume(.hadoop.yarn.NodeUnpublishVolumeRequest) returns (.hadoop.yarn.NodeUnpublishVolumeResponse);
 */
public abstract void nodeUnpublishVolume(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcController controller,
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest request,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcCallback done);
}
// Wraps an Interface implementation as a reflective Service: each generated
// override simply forwards controller/request/callback to the wrapped impl.
public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Service newReflectiveService(
final Interface impl) {
return new CsiAdaptorProtocolService() {
@java.lang.Override
public void getPluginInfo(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcController controller,
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest request,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcCallback done) {
impl.getPluginInfo(controller, request, done);
}
@java.lang.Override
public void validateVolumeCapacity(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcController controller,
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest request,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcCallback done) {
impl.validateVolumeCapacity(controller, request, done);
}
@java.lang.Override
public void nodePublishVolume(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcController controller,
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest request,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcCallback done) {
impl.nodePublishVolume(controller, request, done);
}
@java.lang.Override
public void nodeUnpublishVolume(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcController controller,
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest request,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcCallback done) {
impl.nodeUnpublishVolume(controller, request, done);
}
};
}
// Wraps a BlockingInterface implementation as a reflective BlockingService.
// Dispatch is by descriptor method index (0..3), which must match the rpc
// declaration order in YarnCsiAdaptor.proto.
public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.BlockingService
newReflectiveBlockingService(final BlockingInterface impl) {
return new org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.BlockingService() {
public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.ServiceDescriptor
getDescriptorForType() {
return getDescriptor();
}
public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message callBlockingMethod(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcController controller,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message request)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ServiceException {
if (method.getService() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"Service.callBlockingMethod() given method descriptor for " +
"wrong service type.");
}
switch(method.getIndex()) {
case 0:
return impl.getPluginInfo(controller, (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest)request);
case 1:
return impl.validateVolumeCapacity(controller, (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest)request);
case 2:
return impl.nodePublishVolume(controller, (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest)request);
case 3:
return impl.nodeUnpublishVolume(controller, (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest)request);
default:
throw new java.lang.AssertionError("Can't get here.");
}
}
// Default (empty) request message instance for the given method, used by the
// RPC layer to parse incoming request bytes.
public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message
getRequestPrototype(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method) {
if (method.getService() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"Service.getRequestPrototype() given method " +
"descriptor for wrong service type.");
}
switch(method.getIndex()) {
case 0:
return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest.getDefaultInstance();
case 1:
return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest.getDefaultInstance();
case 2:
return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest.getDefaultInstance();
case 3:
return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
}
}
// Default (empty) response message instance for the given method.
public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message
getResponsePrototype(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method) {
if (method.getService() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"Service.getResponsePrototype() given method " +
"descriptor for wrong service type.");
}
switch(method.getIndex()) {
case 0:
return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse.getDefaultInstance();
case 1:
return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse.getDefaultInstance();
case 2:
return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse.getDefaultInstance();
case 3:
return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
}
}
};
}
// Abstract async RPC methods; implemented by concrete services and by Stub.
/**
 * rpc getPluginInfo(.hadoop.yarn.GetPluginInfoRequest) returns (.hadoop.yarn.GetPluginInfoResponse);
 */
public abstract void getPluginInfo(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcController controller,
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest request,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcCallback done);
/**
 * rpc validateVolumeCapacity(.hadoop.yarn.ValidateVolumeCapabilitiesRequest) returns (.hadoop.yarn.ValidateVolumeCapabilitiesResponse);
 */
public abstract void validateVolumeCapacity(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcController controller,
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest request,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcCallback done);
/**
 * rpc nodePublishVolume(.hadoop.yarn.NodePublishVolumeRequest) returns (.hadoop.yarn.NodePublishVolumeResponse);
 */
public abstract void nodePublishVolume(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcController controller,
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest request,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcCallback done);
/**
 * rpc nodeUnpublishVolume(.hadoop.yarn.NodeUnpublishVolumeRequest) returns (.hadoop.yarn.NodeUnpublishVolumeResponse);
 */
public abstract void nodeUnpublishVolume(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcController controller,
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest request,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcCallback done);
// ServiceDescriptor for hadoop.yarn.CsiAdaptorProtocolService — the first (only)
// service declared in the file descriptor built below.
public static final
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.ServiceDescriptor
getDescriptor() {
return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtocol.getDescriptor().getServices().get(0);
}
public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.ServiceDescriptor
getDescriptorForType() {
return getDescriptor();
}
// Reflective async dispatch: routes by method index to the matching abstract
// method, narrowing the callback via RpcUtil.specializeCallback.
public final void callMethod(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcController controller,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message request,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcCallback<
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message> done) {
if (method.getService() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"Service.callMethod() given method descriptor for wrong " +
"service type.");
}
switch(method.getIndex()) {
case 0:
this.getPluginInfo(controller, (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest)request,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcUtil.specializeCallback(
done));
return;
case 1:
this.validateVolumeCapacity(controller, (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest)request,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcUtil.specializeCallback(
done));
return;
case 2:
this.nodePublishVolume(controller, (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest)request,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcUtil.specializeCallback(
done));
return;
case 3:
this.nodeUnpublishVolume(controller, (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest)request,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcUtil.specializeCallback(
done));
return;
default:
throw new java.lang.AssertionError("Can't get here.");
}
}
public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message
getRequestPrototype(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method) {
if (method.getService() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"Service.getRequestPrototype() given method " +
"descriptor for wrong service type.");
}
switch(method.getIndex()) {
case 0:
return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest.getDefaultInstance();
case 1:
return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest.getDefaultInstance();
case 2:
return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest.getDefaultInstance();
case 3:
return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
}
}
public final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Message
getResponsePrototype(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method) {
if (method.getService() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"Service.getResponsePrototype() given method " +
"descriptor for wrong service type.");
}
switch(method.getIndex()) {
case 0:
return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse.getDefaultInstance();
case 1:
return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse.getDefaultInstance();
case 2:
return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse.getDefaultInstance();
case 3:
return org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
}
}
// Async client stub bound to an RpcChannel; each call targets the method
// descriptor at the same index used by the server-side dispatch above.
public static Stub newStub(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcChannel channel) {
return new Stub(channel);
}
public static final class Stub extends org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtocol.CsiAdaptorProtocolService implements Interface {
private Stub(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcChannel channel) {
this.channel = channel;
}
private final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcChannel channel;
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcChannel getChannel() {
return channel;
}
public void getPluginInfo(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcController controller,
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest request,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcCallback done) {
channel.callMethod(
getDescriptor().getMethods().get(0),
controller,
request,
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse.getDefaultInstance(),
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcUtil.generalizeCallback(
done,
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse.class,
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse.getDefaultInstance()));
}
public void validateVolumeCapacity(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcController controller,
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest request,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcCallback done) {
channel.callMethod(
getDescriptor().getMethods().get(1),
controller,
request,
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse.getDefaultInstance(),
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcUtil.generalizeCallback(
done,
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse.class,
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse.getDefaultInstance()));
}
public void nodePublishVolume(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcController controller,
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest request,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcCallback done) {
channel.callMethod(
getDescriptor().getMethods().get(2),
controller,
request,
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse.getDefaultInstance(),
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcUtil.generalizeCallback(
done,
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse.class,
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse.getDefaultInstance()));
}
public void nodeUnpublishVolume(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcController controller,
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest request,
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcCallback done) {
channel.callMethod(
getDescriptor().getMethods().get(3),
controller,
request,
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse.getDefaultInstance(),
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcUtil.generalizeCallback(
done,
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse.class,
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse.getDefaultInstance()));
}
}
// Synchronous client API: one blocking call per rpc, throwing ServiceException
// on transport/remote failure.
public static BlockingInterface newBlockingStub(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.BlockingRpcChannel channel) {
return new BlockingStub(channel);
}
public interface BlockingInterface {
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse getPluginInfo(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcController controller,
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest request)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ServiceException;
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse validateVolumeCapacity(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcController controller,
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest request)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ServiceException;
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse nodePublishVolume(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcController controller,
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest request)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ServiceException;
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse nodeUnpublishVolume(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcController controller,
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest request)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ServiceException;
}
// Blocking client stub: delegates each call to channel.callBlockingMethod with
// the method descriptor at the matching index and casts the returned Message.
private static final class BlockingStub implements BlockingInterface {
private BlockingStub(org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.BlockingRpcChannel channel) {
this.channel = channel;
}
private final org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.BlockingRpcChannel channel;
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse getPluginInfo(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcController controller,
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest request)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ServiceException {
return (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse) channel.callBlockingMethod(
getDescriptor().getMethods().get(0),
controller,
request,
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse.getDefaultInstance());
}
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse validateVolumeCapacity(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcController controller,
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest request)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ServiceException {
return (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse) channel.callBlockingMethod(
getDescriptor().getMethods().get(1),
controller,
request,
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse.getDefaultInstance());
}
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse nodePublishVolume(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcController controller,
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest request)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ServiceException {
return (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse) channel.callBlockingMethod(
getDescriptor().getMethods().get(2),
controller,
request,
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse.getDefaultInstance());
}
public org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse nodeUnpublishVolume(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.RpcController controller,
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest request)
throws org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ServiceException {
return (org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse) channel.callBlockingMethod(
getDescriptor().getMethods().get(3),
controller,
request,
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse.getDefaultInstance());
}
}
// @@protoc_insertion_point(class_scope:hadoop.yarn.CsiAdaptorProtocolService)
}
// FileDescriptor for YarnCsiAdaptor.proto, built once in the static initializer.
public static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor
getDescriptor() {
return descriptor;
}
private static org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor
descriptor;
// Builds the FileDescriptor from the serialized descriptor of YarnCsiAdaptor.proto.
// The string literal is the compiled proto file descriptor; do not edit it by hand —
// the service/method indices used throughout this class are derived from it.
static {
java.lang.String[] descriptorData = {
"\n\024YarnCsiAdaptor.proto\022\013hadoop.yarn\032\026yar" +
"n_csi_adaptor.proto2\274\003\n\031CsiAdaptorProtoc" +
"olService\022V\n\rgetPluginInfo\022!.hadoop.yarn" +
".GetPluginInfoRequest\032\".hadoop.yarn.GetP" +
"luginInfoResponse\022y\n\026validateVolumeCapac" +
"ity\022..hadoop.yarn.ValidateVolumeCapabili" +
"tiesRequest\032/.hadoop.yarn.ValidateVolume" +
"CapabilitiesResponse\022b\n\021nodePublishVolum" +
"e\022%.hadoop.yarn.NodePublishVolumeRequest" +
"\032&.hadoop.yarn.NodePublishVolumeResponse" +
"\022h\n\023nodeUnpublishVolume\022\'.hadoop.yarn.No" +
"deUnpublishVolumeRequest\032(.hadoop.yarn.N" +
"odeUnpublishVolumeResponseB8\n\034org.apache" +
".hadoop.yarn.protoB\022CsiAdaptorProtocol\210\001" +
"\001\240\001\001"
};
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor. InternalDescriptorAssigner() {
public org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.ExtensionRegistry assignDescriptors(
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor root) {
descriptor = root;
return null;
}
};
org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor
.internalBuildGeneratedFileFrom(descriptorData,
new org.apache.hadoop.shaded.org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor[] {
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.getDescriptor(),
}, assigner);
org.apache.hadoop.shaded.org.apache.hadoop.yarn.proto.CsiAdaptorProtos.getDescriptor();
}
// @@protoc_insertion_point(outer_class_scope)
}
// © 2015 - 2025 Weber Informatics LLC | Privacy Policy (site footer from the artifact-hosting page; commented out to keep the file compilable)