
org.apache.hadoop.yarn.proto.ResourceTracker Maven / Gradle / Ivy

There is a newer version of this artifact: 3.4.0.

// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: ResourceTracker.proto

package org.apache.hadoop.yarn.proto;

public final class ResourceTracker {
  private ResourceTracker() {}
  public static void registerAllExtensions(
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite registry) {
  }

  public static void registerAllExtensions(
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistry registry) {
    registerAllExtensions(
        (org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite) registry);
  }
  /**
   * Protobuf service {@code hadoop.yarn.ResourceTrackerService}
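   *
   * <p>The RPC protocol a NodeManager uses to register with, send heartbeats to,
   * and unregister from the ResourceManager.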
   */
  public static abstract class ResourceTrackerService
      implements org.apache.hadoop.thirdparty.protobuf.Service {
    protected ResourceTrackerService() {}

    public interface Interface {
      /**
       * rpc registerNodeManager(.hadoop.yarn.RegisterNodeManagerRequestProto) returns (.hadoop.yarn.RegisterNodeManagerResponseProto);
       */
      public abstract void registerNodeManager(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerResponseProto> done);

      /**
       * rpc nodeHeartbeat(.hadoop.yarn.NodeHeartbeatRequestProto) returns (.hadoop.yarn.NodeHeartbeatResponseProto);
       */
      public abstract void nodeHeartbeat(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatResponseProto> done);

      /**
       * rpc unRegisterNodeManager(.hadoop.yarn.UnRegisterNodeManagerRequestProto) returns (.hadoop.yarn.UnRegisterNodeManagerResponseProto);
       */
      public abstract void unRegisterNodeManager(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerResponseProto> done);

    }

    public static org.apache.hadoop.thirdparty.protobuf.Service newReflectiveService(
        final Interface impl) {
      return new ResourceTrackerService() {
        @java.lang.Override
        public  void registerNodeManager(
            org.apache.hadoop.thirdparty.protobuf.RpcController controller,
            org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerRequestProto request,
            org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerResponseProto> done) {
          impl.registerNodeManager(controller, request, done);
        }

        @java.lang.Override
        public  void nodeHeartbeat(
            org.apache.hadoop.thirdparty.protobuf.RpcController controller,
            org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatRequestProto request,
            org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatResponseProto> done) {
          impl.nodeHeartbeat(controller, request, done);
        }

        @java.lang.Override
        public  void unRegisterNodeManager(
            org.apache.hadoop.thirdparty.protobuf.RpcController controller,
            org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerRequestProto request,
            org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerResponseProto> done) {
          impl.unRegisterNodeManager(controller, request, done);
        }

      };
    }

    public static org.apache.hadoop.thirdparty.protobuf.BlockingService
        newReflectiveBlockingService(final BlockingInterface impl) {
      return new org.apache.hadoop.thirdparty.protobuf.BlockingService() {
        public final org.apache.hadoop.thirdparty.protobuf.Descriptors.ServiceDescriptor
            getDescriptorForType() {
          return getDescriptor();
        }

        public final org.apache.hadoop.thirdparty.protobuf.Message callBlockingMethod(
            org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method,
            org.apache.hadoop.thirdparty.protobuf.RpcController controller,
            org.apache.hadoop.thirdparty.protobuf.Message request)
            throws org.apache.hadoop.thirdparty.protobuf.ServiceException {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.callBlockingMethod() given method descriptor for " +
              "wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return impl.registerNodeManager(controller, (org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerRequestProto)request);
            case 1:
              return impl.nodeHeartbeat(controller, (org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatRequestProto)request);
            case 2:
              return impl.unRegisterNodeManager(controller, (org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerRequestProto)request);
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }

        public final org.apache.hadoop.thirdparty.protobuf.Message
            getRequestPrototype(
            org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method) {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.getRequestPrototype() given method " +
              "descriptor for wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerRequestProto.getDefaultInstance();
            case 1:
              return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatRequestProto.getDefaultInstance();
            case 2:
              return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerRequestProto.getDefaultInstance();
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }

        public final org.apache.hadoop.thirdparty.protobuf.Message
            getResponsePrototype(
            org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method) {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.getResponsePrototype() given method " +
              "descriptor for wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerResponseProto.getDefaultInstance();
            case 1:
              return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatResponseProto.getDefaultInstance();
            case 2:
              return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerResponseProto.getDefaultInstance();
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }

      };
    }

    /**
     * rpc registerNodeManager(.hadoop.yarn.RegisterNodeManagerRequestProto) returns (.hadoop.yarn.RegisterNodeManagerResponseProto);
     */
    public abstract void registerNodeManager(
        org.apache.hadoop.thirdparty.protobuf.RpcController controller,
        org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerRequestProto request,
        org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerResponseProto> done);

    /**
     * rpc nodeHeartbeat(.hadoop.yarn.NodeHeartbeatRequestProto) returns (.hadoop.yarn.NodeHeartbeatResponseProto);
     */
    public abstract void nodeHeartbeat(
        org.apache.hadoop.thirdparty.protobuf.RpcController controller,
        org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatRequestProto request,
        org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatResponseProto> done);

    /**
     * rpc unRegisterNodeManager(.hadoop.yarn.UnRegisterNodeManagerRequestProto) returns (.hadoop.yarn.UnRegisterNodeManagerResponseProto);
     */
    public abstract void unRegisterNodeManager(
        org.apache.hadoop.thirdparty.protobuf.RpcController controller,
        org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerRequestProto request,
        org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerResponseProto> done);

    public static final
        org.apache.hadoop.thirdparty.protobuf.Descriptors.ServiceDescriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.ResourceTracker.getDescriptor().getServices().get(0);
    }
    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.ServiceDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }

    public final void callMethod(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method,
        org.apache.hadoop.thirdparty.protobuf.RpcController controller,
        org.apache.hadoop.thirdparty.protobuf.Message request,
        org.apache.hadoop.thirdparty.protobuf.RpcCallback<
          org.apache.hadoop.thirdparty.protobuf.Message> done) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.callMethod() given method descriptor for wrong " +
          "service type.");
      }
      switch(method.getIndex()) {
        case 0:
          this.registerNodeManager(controller, (org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerRequestProto)request,
            org.apache.hadoop.thirdparty.protobuf.RpcUtil.specializeCallback(
              done));
          return;
        case 1:
          this.nodeHeartbeat(controller, (org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatRequestProto)request,
            org.apache.hadoop.thirdparty.protobuf.RpcUtil.specializeCallback(
              done));
          return;
        case 2:
          this.unRegisterNodeManager(controller, (org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerRequestProto)request,
            org.apache.hadoop.thirdparty.protobuf.RpcUtil.specializeCallback(
              done));
          return;
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }

    public final org.apache.hadoop.thirdparty.protobuf.Message
        getRequestPrototype(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.getRequestPrototype() given method " +
          "descriptor for wrong service type.");
      }
      switch(method.getIndex()) {
        case 0:
          return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerRequestProto.getDefaultInstance();
        case 1:
          return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatRequestProto.getDefaultInstance();
        case 2:
          return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerRequestProto.getDefaultInstance();
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }

    public final org.apache.hadoop.thirdparty.protobuf.Message
        getResponsePrototype(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.getResponsePrototype() given method " +
          "descriptor for wrong service type.");
      }
      switch(method.getIndex()) {
        case 0:
          return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerResponseProto.getDefaultInstance();
        case 1:
          return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatResponseProto.getDefaultInstance();
        case 2:
          return org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerResponseProto.getDefaultInstance();
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }

    public static Stub newStub(
        org.apache.hadoop.thirdparty.protobuf.RpcChannel channel) {
      return new Stub(channel);
    }

    public static final class Stub extends org.apache.hadoop.yarn.proto.ResourceTracker.ResourceTrackerService implements Interface {
      private Stub(org.apache.hadoop.thirdparty.protobuf.RpcChannel channel) {
        this.channel = channel;
      }

      private final org.apache.hadoop.thirdparty.protobuf.RpcChannel channel;

      public org.apache.hadoop.thirdparty.protobuf.RpcChannel getChannel() {
        return channel;
      }

      public  void registerNodeManager(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(0),
          controller,
          request,
          org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerResponseProto.getDefaultInstance(),
          org.apache.hadoop.thirdparty.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerResponseProto.class,
            org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerResponseProto.getDefaultInstance()));
      }

      public  void nodeHeartbeat(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(1),
          controller,
          request,
          org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatResponseProto.getDefaultInstance(),
          org.apache.hadoop.thirdparty.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatResponseProto.class,
            org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatResponseProto.getDefaultInstance()));
      }

      public  void unRegisterNodeManager(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(2),
          controller,
          request,
          org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerResponseProto.getDefaultInstance(),
          org.apache.hadoop.thirdparty.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerResponseProto.class,
            org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerResponseProto.getDefaultInstance()));
      }
    }

    public static BlockingInterface newBlockingStub(
        org.apache.hadoop.thirdparty.protobuf.BlockingRpcChannel channel) {
      return new BlockingStub(channel);
    }

    public interface BlockingInterface {
      public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerResponseProto registerNodeManager(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException;

      public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatResponseProto nodeHeartbeat(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException;

      public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerResponseProto unRegisterNodeManager(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException;
    }

    private static final class BlockingStub implements BlockingInterface {
      private BlockingStub(org.apache.hadoop.thirdparty.protobuf.BlockingRpcChannel channel) {
        this.channel = channel;
      }

      private final org.apache.hadoop.thirdparty.protobuf.BlockingRpcChannel channel;

      public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerResponseProto registerNodeManager(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException {
        return (org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(0),
          controller,
          request,
          org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerResponseProto.getDefaultInstance());
      }


      public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatResponseProto nodeHeartbeat(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException {
        return (org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(1),
          controller,
          request,
          org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatResponseProto.getDefaultInstance());
      }


      public org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerResponseProto unRegisterNodeManager(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException {
        return (org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(2),
          controller,
          request,
          org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.UnRegisterNodeManagerResponseProto.getDefaultInstance());
      }

    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.ResourceTrackerService)
  }


  public static org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor
      getDescriptor() {
    return descriptor;
  }
  private static  org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor
      descriptor;
  static {
    java.lang.String[] descriptorData = {
      "\n\025ResourceTracker.proto\022\013hadoop.yarn\032\'ya" +
      "rn_server_common_service_protos.proto2\350\002" +
      "\n\026ResourceTrackerService\022r\n\023registerNode" +
      "Manager\022,.hadoop.yarn.RegisterNodeManage" +
      "rRequestProto\032-.hadoop.yarn.RegisterNode" +
      "ManagerResponseProto\022`\n\rnodeHeartbeat\022&." +
      "hadoop.yarn.NodeHeartbeatRequestProto\032\'." +
      "hadoop.yarn.NodeHeartbeatResponseProto\022x" +
      "\n\025unRegisterNodeManager\022..hadoop.yarn.Un" +
      "RegisterNodeManagerRequestProto\032/.hadoop" +
      ".yarn.UnRegisterNodeManagerResponseProto" +
      "B5\n\034org.apache.hadoop.yarn.protoB\017Resour" +
      "ceTracker\210\001\001\240\001\001"
    };
    org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
        new org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
          public org.apache.hadoop.thirdparty.protobuf.ExtensionRegistry assignDescriptors(
              org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor root) {
            descriptor = root;
            return null;
          }
        };
    org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor
      .internalBuildGeneratedFileFrom(descriptorData,
        new org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor[] {
          org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.getDescriptor(),
        }, assigner);
    org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.getDescriptor();
  }

  // @@protoc_insertion_point(outer_class_scope)
}
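
A minimal sketch of how the generated blocking stub might be used, assuming an org.apache.hadoop.thirdparty.protobuf.BlockingRpcChannel is already available from the surrounding RPC layer (in YARN this wiring is normally handled by the RPC factories rather than written by hand). The openChannel() helper and the empty heartbeat request below are illustrative placeholders, not part of the Hadoop API.

import org.apache.hadoop.thirdparty.protobuf.BlockingRpcChannel;
import org.apache.hadoop.thirdparty.protobuf.ServiceException;
import org.apache.hadoop.yarn.proto.ResourceTracker.ResourceTrackerService;
import org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatRequestProto;
import org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatResponseProto;

public class ResourceTrackerStubSketch {

  public static void main(String[] args) throws ServiceException {
    // Hypothetical: obtain a BlockingRpcChannel from whatever RPC transport is in use.
    BlockingRpcChannel channel = openChannel();

    // Wrap the channel in the generated blocking stub.
    ResourceTrackerService.BlockingInterface tracker =
        ResourceTrackerService.newBlockingStub(channel);

    // Build an empty, purely illustrative heartbeat request and invoke the RPC.
    // A real NodeManager would populate node status and related fields.
    NodeHeartbeatRequestProto request = NodeHeartbeatRequestProto.newBuilder().build();
    NodeHeartbeatResponseProto response = tracker.nodeHeartbeat(null, request);
    System.out.println("nodeHeartbeat returned: " + response);
  }

  // Placeholder: a real channel would come from the RPC framework in use.
  private static BlockingRpcChannel openChannel() {
    throw new UnsupportedOperationException("wire an RPC channel here");
  }
}

On the server side, an implementation of BlockingInterface can be exposed through newReflectiveBlockingService(impl), which dispatches incoming calls by method index as shown in the generated code above.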