All downloads are free. The search and download functionalities use the official Maven repository.

tensorflow.serving.PredictionServiceGrpc Maven / Gradle / Ivy

The newest version!
package tensorflow.serving;

import static io.grpc.MethodDescriptor.generateFullMethodName;
import static io.grpc.stub.ClientCalls.asyncBidiStreamingCall;
import static io.grpc.stub.ClientCalls.asyncClientStreamingCall;
import static io.grpc.stub.ClientCalls.asyncServerStreamingCall;
import static io.grpc.stub.ClientCalls.asyncUnaryCall;
import static io.grpc.stub.ClientCalls.blockingServerStreamingCall;
import static io.grpc.stub.ClientCalls.blockingUnaryCall;
import static io.grpc.stub.ClientCalls.futureUnaryCall;
import static io.grpc.stub.ServerCalls.asyncBidiStreamingCall;
import static io.grpc.stub.ServerCalls.asyncClientStreamingCall;
import static io.grpc.stub.ServerCalls.asyncServerStreamingCall;
import static io.grpc.stub.ServerCalls.asyncUnaryCall;
import static io.grpc.stub.ServerCalls.asyncUnimplementedStreamingCall;
import static io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall;

/**
 * 
 * open source marker; do not remove
 * PredictionService provides access to machine-learned models loaded by
 * model_servers.
 * 
*/ @javax.annotation.Generated( value = "by gRPC proto compiler (version 1.18.0)", comments = "Source: tensorflow_serving/apis/prediction_service.proto") public final class PredictionServiceGrpc { private PredictionServiceGrpc() {} public static final String SERVICE_NAME = "tensorflow.serving.PredictionService"; // Static method descriptors that strictly reflect the proto. private static volatile io.grpc.MethodDescriptor getClassifyMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "Classify", requestType = tensorflow.serving.Classification.ClassificationRequest.class, responseType = tensorflow.serving.Classification.ClassificationResponse.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor getClassifyMethod() { io.grpc.MethodDescriptor getClassifyMethod; if ((getClassifyMethod = PredictionServiceGrpc.getClassifyMethod) == null) { synchronized (PredictionServiceGrpc.class) { if ((getClassifyMethod = PredictionServiceGrpc.getClassifyMethod) == null) { PredictionServiceGrpc.getClassifyMethod = getClassifyMethod = io.grpc.MethodDescriptor.newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName( "tensorflow.serving.PredictionService", "Classify")) .setSampledToLocalTracing(true) .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( tensorflow.serving.Classification.ClassificationRequest.getDefaultInstance())) .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( tensorflow.serving.Classification.ClassificationResponse.getDefaultInstance())) .setSchemaDescriptor(new PredictionServiceMethodDescriptorSupplier("Classify")) .build(); } } } return getClassifyMethod; } private static volatile io.grpc.MethodDescriptor getRegressMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "Regress", requestType = tensorflow.serving.RegressionOuterClass.RegressionRequest.class, responseType = 
tensorflow.serving.RegressionOuterClass.RegressionResponse.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor getRegressMethod() { io.grpc.MethodDescriptor getRegressMethod; if ((getRegressMethod = PredictionServiceGrpc.getRegressMethod) == null) { synchronized (PredictionServiceGrpc.class) { if ((getRegressMethod = PredictionServiceGrpc.getRegressMethod) == null) { PredictionServiceGrpc.getRegressMethod = getRegressMethod = io.grpc.MethodDescriptor.newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName( "tensorflow.serving.PredictionService", "Regress")) .setSampledToLocalTracing(true) .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( tensorflow.serving.RegressionOuterClass.RegressionRequest.getDefaultInstance())) .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( tensorflow.serving.RegressionOuterClass.RegressionResponse.getDefaultInstance())) .setSchemaDescriptor(new PredictionServiceMethodDescriptorSupplier("Regress")) .build(); } } } return getRegressMethod; } private static volatile io.grpc.MethodDescriptor getPredictMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "Predict", requestType = tensorflow.serving.Predict.PredictRequest.class, responseType = tensorflow.serving.Predict.PredictResponse.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor getPredictMethod() { io.grpc.MethodDescriptor getPredictMethod; if ((getPredictMethod = PredictionServiceGrpc.getPredictMethod) == null) { synchronized (PredictionServiceGrpc.class) { if ((getPredictMethod = PredictionServiceGrpc.getPredictMethod) == null) { PredictionServiceGrpc.getPredictMethod = getPredictMethod = io.grpc.MethodDescriptor.newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName( "tensorflow.serving.PredictionService", "Predict")) 
.setSampledToLocalTracing(true) .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( tensorflow.serving.Predict.PredictRequest.getDefaultInstance())) .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( tensorflow.serving.Predict.PredictResponse.getDefaultInstance())) .setSchemaDescriptor(new PredictionServiceMethodDescriptorSupplier("Predict")) .build(); } } } return getPredictMethod; } private static volatile io.grpc.MethodDescriptor getMultiInferenceMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "MultiInference", requestType = tensorflow.serving.Inference.MultiInferenceRequest.class, responseType = tensorflow.serving.Inference.MultiInferenceResponse.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor getMultiInferenceMethod() { io.grpc.MethodDescriptor getMultiInferenceMethod; if ((getMultiInferenceMethod = PredictionServiceGrpc.getMultiInferenceMethod) == null) { synchronized (PredictionServiceGrpc.class) { if ((getMultiInferenceMethod = PredictionServiceGrpc.getMultiInferenceMethod) == null) { PredictionServiceGrpc.getMultiInferenceMethod = getMultiInferenceMethod = io.grpc.MethodDescriptor.newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName( "tensorflow.serving.PredictionService", "MultiInference")) .setSampledToLocalTracing(true) .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( tensorflow.serving.Inference.MultiInferenceRequest.getDefaultInstance())) .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( tensorflow.serving.Inference.MultiInferenceResponse.getDefaultInstance())) .setSchemaDescriptor(new PredictionServiceMethodDescriptorSupplier("MultiInference")) .build(); } } } return getMultiInferenceMethod; } private static volatile io.grpc.MethodDescriptor getGetModelMetadataMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "GetModelMetadata", 
requestType = tensorflow.serving.GetModelMetadata.GetModelMetadataRequest.class, responseType = tensorflow.serving.GetModelMetadata.GetModelMetadataResponse.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor getGetModelMetadataMethod() { io.grpc.MethodDescriptor getGetModelMetadataMethod; if ((getGetModelMetadataMethod = PredictionServiceGrpc.getGetModelMetadataMethod) == null) { synchronized (PredictionServiceGrpc.class) { if ((getGetModelMetadataMethod = PredictionServiceGrpc.getGetModelMetadataMethod) == null) { PredictionServiceGrpc.getGetModelMetadataMethod = getGetModelMetadataMethod = io.grpc.MethodDescriptor.newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName( "tensorflow.serving.PredictionService", "GetModelMetadata")) .setSampledToLocalTracing(true) .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( tensorflow.serving.GetModelMetadata.GetModelMetadataRequest.getDefaultInstance())) .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( tensorflow.serving.GetModelMetadata.GetModelMetadataResponse.getDefaultInstance())) .setSchemaDescriptor(new PredictionServiceMethodDescriptorSupplier("GetModelMetadata")) .build(); } } } return getGetModelMetadataMethod; } /** * Creates a new async stub that supports all call types for the service */ public static PredictionServiceStub newStub(io.grpc.Channel channel) { return new PredictionServiceStub(channel); } /** * Creates a new blocking-style stub that supports unary and streaming output calls on the service */ public static PredictionServiceBlockingStub newBlockingStub( io.grpc.Channel channel) { return new PredictionServiceBlockingStub(channel); } /** * Creates a new ListenableFuture-style stub that supports unary calls on the service */ public static PredictionServiceFutureStub newFutureStub( io.grpc.Channel channel) { return new PredictionServiceFutureStub(channel); } /** *
   * open source marker; do not remove
   * PredictionService provides access to machine-learned models loaded by
   * model_servers.
   * 
*/ public static abstract class PredictionServiceImplBase implements io.grpc.BindableService { /** *
     * Classify.
     * 
*/ public void classify(tensorflow.serving.Classification.ClassificationRequest request, io.grpc.stub.StreamObserver responseObserver) { asyncUnimplementedUnaryCall(getClassifyMethod(), responseObserver); } /** *
     * Regress.
     * 
*/ public void regress(tensorflow.serving.RegressionOuterClass.RegressionRequest request, io.grpc.stub.StreamObserver responseObserver) { asyncUnimplementedUnaryCall(getRegressMethod(), responseObserver); } /** *
     * Predict -- provides access to loaded TensorFlow model.
     * 
*/ public void predict(tensorflow.serving.Predict.PredictRequest request, io.grpc.stub.StreamObserver responseObserver) { asyncUnimplementedUnaryCall(getPredictMethod(), responseObserver); } /** *
     * MultiInference API for multi-headed models.
     * 
*/ public void multiInference(tensorflow.serving.Inference.MultiInferenceRequest request, io.grpc.stub.StreamObserver responseObserver) { asyncUnimplementedUnaryCall(getMultiInferenceMethod(), responseObserver); } /** *
     * GetModelMetadata - provides access to metadata for loaded models.
     * 
*/ public void getModelMetadata(tensorflow.serving.GetModelMetadata.GetModelMetadataRequest request, io.grpc.stub.StreamObserver responseObserver) { asyncUnimplementedUnaryCall(getGetModelMetadataMethod(), responseObserver); } @java.lang.Override public final io.grpc.ServerServiceDefinition bindService() { return io.grpc.ServerServiceDefinition.builder(getServiceDescriptor()) .addMethod( getClassifyMethod(), asyncUnaryCall( new MethodHandlers< tensorflow.serving.Classification.ClassificationRequest, tensorflow.serving.Classification.ClassificationResponse>( this, METHODID_CLASSIFY))) .addMethod( getRegressMethod(), asyncUnaryCall( new MethodHandlers< tensorflow.serving.RegressionOuterClass.RegressionRequest, tensorflow.serving.RegressionOuterClass.RegressionResponse>( this, METHODID_REGRESS))) .addMethod( getPredictMethod(), asyncUnaryCall( new MethodHandlers< tensorflow.serving.Predict.PredictRequest, tensorflow.serving.Predict.PredictResponse>( this, METHODID_PREDICT))) .addMethod( getMultiInferenceMethod(), asyncUnaryCall( new MethodHandlers< tensorflow.serving.Inference.MultiInferenceRequest, tensorflow.serving.Inference.MultiInferenceResponse>( this, METHODID_MULTI_INFERENCE))) .addMethod( getGetModelMetadataMethod(), asyncUnaryCall( new MethodHandlers< tensorflow.serving.GetModelMetadata.GetModelMetadataRequest, tensorflow.serving.GetModelMetadata.GetModelMetadataResponse>( this, METHODID_GET_MODEL_METADATA))) .build(); } } /** *
   * open source marker; do not remove
   * PredictionService provides access to machine-learned models loaded by
   * model_servers.
   * 
*/ public static final class PredictionServiceStub extends io.grpc.stub.AbstractStub { private PredictionServiceStub(io.grpc.Channel channel) { super(channel); } private PredictionServiceStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected PredictionServiceStub build(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new PredictionServiceStub(channel, callOptions); } /** *
     * Classify.
     * 
*/ public void classify(tensorflow.serving.Classification.ClassificationRequest request, io.grpc.stub.StreamObserver responseObserver) { asyncUnaryCall( getChannel().newCall(getClassifyMethod(), getCallOptions()), request, responseObserver); } /** *
     * Regress.
     * 
*/ public void regress(tensorflow.serving.RegressionOuterClass.RegressionRequest request, io.grpc.stub.StreamObserver responseObserver) { asyncUnaryCall( getChannel().newCall(getRegressMethod(), getCallOptions()), request, responseObserver); } /** *
     * Predict -- provides access to loaded TensorFlow model.
     * 
*/ public void predict(tensorflow.serving.Predict.PredictRequest request, io.grpc.stub.StreamObserver responseObserver) { asyncUnaryCall( getChannel().newCall(getPredictMethod(), getCallOptions()), request, responseObserver); } /** *
     * MultiInference API for multi-headed models.
     * 
*/ public void multiInference(tensorflow.serving.Inference.MultiInferenceRequest request, io.grpc.stub.StreamObserver responseObserver) { asyncUnaryCall( getChannel().newCall(getMultiInferenceMethod(), getCallOptions()), request, responseObserver); } /** *
     * GetModelMetadata - provides access to metadata for loaded models.
     * 
*/ public void getModelMetadata(tensorflow.serving.GetModelMetadata.GetModelMetadataRequest request, io.grpc.stub.StreamObserver responseObserver) { asyncUnaryCall( getChannel().newCall(getGetModelMetadataMethod(), getCallOptions()), request, responseObserver); } } /** *
   * open source marker; do not remove
   * PredictionService provides access to machine-learned models loaded by
   * model_servers.
   * 
*/ public static final class PredictionServiceBlockingStub extends io.grpc.stub.AbstractStub { private PredictionServiceBlockingStub(io.grpc.Channel channel) { super(channel); } private PredictionServiceBlockingStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected PredictionServiceBlockingStub build(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new PredictionServiceBlockingStub(channel, callOptions); } /** *
     * Classify.
     * 
*/ public tensorflow.serving.Classification.ClassificationResponse classify(tensorflow.serving.Classification.ClassificationRequest request) { return blockingUnaryCall( getChannel(), getClassifyMethod(), getCallOptions(), request); } /** *
     * Regress.
     * 
*/ public tensorflow.serving.RegressionOuterClass.RegressionResponse regress(tensorflow.serving.RegressionOuterClass.RegressionRequest request) { return blockingUnaryCall( getChannel(), getRegressMethod(), getCallOptions(), request); } /** *
     * Predict -- provides access to loaded TensorFlow model.
     * 
*/ public tensorflow.serving.Predict.PredictResponse predict(tensorflow.serving.Predict.PredictRequest request) { return blockingUnaryCall( getChannel(), getPredictMethod(), getCallOptions(), request); } /** *
     * MultiInference API for multi-headed models.
     * 
*/ public tensorflow.serving.Inference.MultiInferenceResponse multiInference(tensorflow.serving.Inference.MultiInferenceRequest request) { return blockingUnaryCall( getChannel(), getMultiInferenceMethod(), getCallOptions(), request); } /** *
     * GetModelMetadata - provides access to metadata for loaded models.
     * 
*/ public tensorflow.serving.GetModelMetadata.GetModelMetadataResponse getModelMetadata(tensorflow.serving.GetModelMetadata.GetModelMetadataRequest request) { return blockingUnaryCall( getChannel(), getGetModelMetadataMethod(), getCallOptions(), request); } } /** *
   * open source marker; do not remove
   * PredictionService provides access to machine-learned models loaded by
   * model_servers.
   * 
*/ public static final class PredictionServiceFutureStub extends io.grpc.stub.AbstractStub { private PredictionServiceFutureStub(io.grpc.Channel channel) { super(channel); } private PredictionServiceFutureStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected PredictionServiceFutureStub build(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new PredictionServiceFutureStub(channel, callOptions); } /** *
     * Classify.
     * 
*/ public com.google.common.util.concurrent.ListenableFuture classify( tensorflow.serving.Classification.ClassificationRequest request) { return futureUnaryCall( getChannel().newCall(getClassifyMethod(), getCallOptions()), request); } /** *
     * Regress.
     * 
*/ public com.google.common.util.concurrent.ListenableFuture regress( tensorflow.serving.RegressionOuterClass.RegressionRequest request) { return futureUnaryCall( getChannel().newCall(getRegressMethod(), getCallOptions()), request); } /** *
     * Predict -- provides access to loaded TensorFlow model.
     * 
*/ public com.google.common.util.concurrent.ListenableFuture predict( tensorflow.serving.Predict.PredictRequest request) { return futureUnaryCall( getChannel().newCall(getPredictMethod(), getCallOptions()), request); } /** *
     * MultiInference API for multi-headed models.
     * 
*/ public com.google.common.util.concurrent.ListenableFuture multiInference( tensorflow.serving.Inference.MultiInferenceRequest request) { return futureUnaryCall( getChannel().newCall(getMultiInferenceMethod(), getCallOptions()), request); } /** *
     * GetModelMetadata - provides access to metadata for loaded models.
     * 
*/ public com.google.common.util.concurrent.ListenableFuture getModelMetadata( tensorflow.serving.GetModelMetadata.GetModelMetadataRequest request) { return futureUnaryCall( getChannel().newCall(getGetModelMetadataMethod(), getCallOptions()), request); } } private static final int METHODID_CLASSIFY = 0; private static final int METHODID_REGRESS = 1; private static final int METHODID_PREDICT = 2; private static final int METHODID_MULTI_INFERENCE = 3; private static final int METHODID_GET_MODEL_METADATA = 4; private static final class MethodHandlers implements io.grpc.stub.ServerCalls.UnaryMethod, io.grpc.stub.ServerCalls.ServerStreamingMethod, io.grpc.stub.ServerCalls.ClientStreamingMethod, io.grpc.stub.ServerCalls.BidiStreamingMethod { private final PredictionServiceImplBase serviceImpl; private final int methodId; MethodHandlers(PredictionServiceImplBase serviceImpl, int methodId) { this.serviceImpl = serviceImpl; this.methodId = methodId; } @java.lang.Override @java.lang.SuppressWarnings("unchecked") public void invoke(Req request, io.grpc.stub.StreamObserver responseObserver) { switch (methodId) { case METHODID_CLASSIFY: serviceImpl.classify((tensorflow.serving.Classification.ClassificationRequest) request, (io.grpc.stub.StreamObserver) responseObserver); break; case METHODID_REGRESS: serviceImpl.regress((tensorflow.serving.RegressionOuterClass.RegressionRequest) request, (io.grpc.stub.StreamObserver) responseObserver); break; case METHODID_PREDICT: serviceImpl.predict((tensorflow.serving.Predict.PredictRequest) request, (io.grpc.stub.StreamObserver) responseObserver); break; case METHODID_MULTI_INFERENCE: serviceImpl.multiInference((tensorflow.serving.Inference.MultiInferenceRequest) request, (io.grpc.stub.StreamObserver) responseObserver); break; case METHODID_GET_MODEL_METADATA: serviceImpl.getModelMetadata((tensorflow.serving.GetModelMetadata.GetModelMetadataRequest) request, (io.grpc.stub.StreamObserver) responseObserver); break; default: throw new 
AssertionError(); } } @java.lang.Override @java.lang.SuppressWarnings("unchecked") public io.grpc.stub.StreamObserver invoke( io.grpc.stub.StreamObserver responseObserver) { switch (methodId) { default: throw new AssertionError(); } } } private static abstract class PredictionServiceBaseDescriptorSupplier implements io.grpc.protobuf.ProtoFileDescriptorSupplier, io.grpc.protobuf.ProtoServiceDescriptorSupplier { PredictionServiceBaseDescriptorSupplier() {} @java.lang.Override public com.google.protobuf.Descriptors.FileDescriptor getFileDescriptor() { return tensorflow.serving.PredictionServiceOuterClass.getDescriptor(); } @java.lang.Override public com.google.protobuf.Descriptors.ServiceDescriptor getServiceDescriptor() { return getFileDescriptor().findServiceByName("PredictionService"); } } private static final class PredictionServiceFileDescriptorSupplier extends PredictionServiceBaseDescriptorSupplier { PredictionServiceFileDescriptorSupplier() {} } private static final class PredictionServiceMethodDescriptorSupplier extends PredictionServiceBaseDescriptorSupplier implements io.grpc.protobuf.ProtoMethodDescriptorSupplier { private final String methodName; PredictionServiceMethodDescriptorSupplier(String methodName) { this.methodName = methodName; } @java.lang.Override public com.google.protobuf.Descriptors.MethodDescriptor getMethodDescriptor() { return getServiceDescriptor().findMethodByName(methodName); } } private static volatile io.grpc.ServiceDescriptor serviceDescriptor; public static io.grpc.ServiceDescriptor getServiceDescriptor() { io.grpc.ServiceDescriptor result = serviceDescriptor; if (result == null) { synchronized (PredictionServiceGrpc.class) { result = serviceDescriptor; if (result == null) { serviceDescriptor = result = io.grpc.ServiceDescriptor.newBuilder(SERVICE_NAME) .setSchemaDescriptor(new PredictionServiceFileDescriptorSupplier()) .addMethod(getClassifyMethod()) .addMethod(getRegressMethod()) .addMethod(getPredictMethod()) 
.addMethod(getMultiInferenceMethod()) .addMethod(getGetModelMetadataMethod()) .build(); } } } return result; } }




© 2015 - 2024 Weber Informatics LLC | Privacy Policy