com.azure.resourcemanager.machinelearning.fluent.BatchDeploymentsClient

This package contains the Microsoft Azure Machine Learning Management SDK. For documentation on how to use this package, please see https://aka.ms/azsdk/java/mgmt. These APIs allow end users to operate on Azure Machine Learning Workspace resources. Package tag package-2024-04.

// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.

package com.azure.resourcemanager.machinelearning.fluent;

import com.azure.core.annotation.ReturnType;
import com.azure.core.annotation.ServiceMethod;
import com.azure.core.http.rest.PagedIterable;
import com.azure.core.http.rest.Response;
import com.azure.core.management.polling.PollResult;
import com.azure.core.util.Context;
import com.azure.core.util.polling.SyncPoller;
import com.azure.resourcemanager.machinelearning.fluent.models.BatchDeploymentInner;
import com.azure.resourcemanager.machinelearning.models.PartialBatchDeploymentPartialMinimalTrackedResourceWithProperties;

/**
 * An instance of this class provides access to all the operations defined in BatchDeploymentsClient.
 */
public interface BatchDeploymentsClient {
    /**
     * Lists Batch inference deployments in the workspace.
     * 
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param workspaceName Name of Azure Machine Learning workspace.
     * @param endpointName Endpoint name.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return a paginated list of BatchDeployment entities as paginated response with {@link PagedIterable}.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    PagedIterable<BatchDeploymentInner> list(String resourceGroupName, String workspaceName, String endpointName);
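
    // Illustrative usage sketch (not part of the generated interface): iterate every deployment under an
    // endpoint. Assumes 'client' is a BatchDeploymentsClient obtained from the service client; the resource
    // names are placeholders.
    //
    //     PagedIterable<BatchDeploymentInner> deployments = client.list("my-rg", "my-workspace", "my-endpoint");
    //     deployments.forEach(d -> System.out.println(d.name()));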

    /**
     * Lists Batch inference deployments in the workspace.
     * 
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param workspaceName Name of Azure Machine Learning workspace.
     * @param endpointName Endpoint name.
     * @param orderBy Ordering of the results.
     * @param top Maximum number of results to return.
     * @param skip Continuation token for pagination.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return a paginated list of BatchDeployment entities as paginated response with {@link PagedIterable}.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    PagedIterable<BatchDeploymentInner> list(String resourceGroupName, String workspaceName, String endpointName,
        String orderBy, Integer top, String skip, Context context);
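
    // Illustrative usage sketch: the expanded overload adds server-side ordering, a page-size hint, a
    // continuation token, and a per-call Context. Context.NONE sends no extra request context; the orderBy
    // value is a placeholder.
    //
    //     PagedIterable<BatchDeploymentInner> page = client.list("my-rg", "my-workspace", "my-endpoint",
    //         "createdAtTime desc", 10, null, Context.NONE);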

    /**
     * Delete Batch Inference deployment (asynchronous).
     * 
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param workspaceName Name of Azure Machine Learning workspace.
     * @param endpointName Endpoint name.
     * @param deploymentName Inference deployment identifier.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the {@link SyncPoller} for polling of long-running operation.
     */
    @ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION)
    SyncPoller<PollResult<Void>, Void> beginDelete(String resourceGroupName, String workspaceName, String endpointName,
        String deploymentName);
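
    // Illustrative usage sketch: start the long-running delete, then block until the service reports
    // completion.
    //
    //     SyncPoller<PollResult<Void>, Void> poller =
    //         client.beginDelete("my-rg", "my-workspace", "my-endpoint", "my-deployment");
    //     poller.waitForCompletion();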

    /**
     * Delete Batch Inference deployment (asynchronous).
     * 
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param workspaceName Name of Azure Machine Learning workspace.
     * @param endpointName Endpoint name.
     * @param deploymentName Inference deployment identifier.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the {@link SyncPoller} for polling of long-running operation.
     */
    @ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION)
    SyncPoller<PollResult<Void>, Void> beginDelete(String resourceGroupName, String workspaceName, String endpointName,
        String deploymentName, Context context);

    /**
     * Delete Batch Inference deployment (asynchronous).
     * 
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param workspaceName Name of Azure Machine Learning workspace.
     * @param endpointName Endpoint name.
     * @param deploymentName Inference deployment identifier.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    void delete(String resourceGroupName, String workspaceName, String endpointName, String deploymentName);
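
    // Illustrative usage sketch: the blocking convenience overload; it returns only after the delete has
    // completed.
    //
    //     client.delete("my-rg", "my-workspace", "my-endpoint", "my-deployment");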

    /**
     * Delete Batch Inference deployment (asynchronous).
     * 
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param workspaceName Name of Azure Machine Learning workspace.
     * @param endpointName Endpoint name.
     * @param deploymentName Inference deployment identifier.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    void delete(String resourceGroupName, String workspaceName, String endpointName, String deploymentName,
        Context context);

    /**
     * Gets a batch inference deployment by id.
     * 
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param workspaceName Name of Azure Machine Learning workspace.
     * @param endpointName Endpoint name.
     * @param deploymentName The identifier for the Batch deployment.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return a batch inference deployment by id along with {@link Response}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    Response<BatchDeploymentInner> getWithResponse(String resourceGroupName, String workspaceName, String endpointName,
        String deploymentName, Context context);
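
    // Illustrative usage sketch: inspect the HTTP response before using the payload.
    //
    //     Response<BatchDeploymentInner> response =
    //         client.getWithResponse("my-rg", "my-workspace", "my-endpoint", "my-deployment", Context.NONE);
    //     System.out.println(response.getStatusCode() + ": " + response.getValue().name());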

    /**
     * Gets a batch inference deployment by id.
     * 
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param workspaceName Name of Azure Machine Learning workspace.
     * @param endpointName Endpoint name.
     * @param deploymentName The identifier for the Batch deployment.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return a batch inference deployment by id.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    BatchDeploymentInner get(String resourceGroupName, String workspaceName, String endpointName,
        String deploymentName);
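
    // Illustrative usage sketch: fetch just the resource when the raw HTTP response is not needed.
    //
    //     BatchDeploymentInner deployment = client.get("my-rg", "my-workspace", "my-endpoint", "my-deployment");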

    /**
     * Update a batch inference deployment (asynchronous).
     * 
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param workspaceName Name of Azure Machine Learning workspace.
     * @param endpointName Inference endpoint name.
     * @param deploymentName The identifier for the Batch inference deployment.
     * @param body Batch inference deployment definition object.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the {@link SyncPoller} for polling of long-running operation.
     */
    @ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION)
    SyncPoller<PollResult<BatchDeploymentInner>, BatchDeploymentInner> beginUpdate(String resourceGroupName,
        String workspaceName, String endpointName, String deploymentName,
        PartialBatchDeploymentPartialMinimalTrackedResourceWithProperties body);
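
    // Illustrative usage sketch: build a partial-update payload, start the patch, and wait for the final
    // resource. The payload is left empty here; populate only the fields you intend to change.
    //
    //     PartialBatchDeploymentPartialMinimalTrackedResourceWithProperties patch =
    //         new PartialBatchDeploymentPartialMinimalTrackedResourceWithProperties();
    //     BatchDeploymentInner updated = client
    //         .beginUpdate("my-rg", "my-workspace", "my-endpoint", "my-deployment", patch)
    //         .getFinalResult();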

    /**
     * Update a batch inference deployment (asynchronous).
     * 
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param workspaceName Name of Azure Machine Learning workspace.
     * @param endpointName Inference endpoint name.
     * @param deploymentName The identifier for the Batch inference deployment.
     * @param body Batch inference deployment definition object.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the {@link SyncPoller} for polling of long-running operation.
     */
    @ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION)
    SyncPoller<PollResult<BatchDeploymentInner>, BatchDeploymentInner> beginUpdate(String resourceGroupName,
        String workspaceName, String endpointName, String deploymentName,
        PartialBatchDeploymentPartialMinimalTrackedResourceWithProperties body, Context context);

    /**
     * Update a batch inference deployment (asynchronous).
     * 
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param workspaceName Name of Azure Machine Learning workspace.
     * @param endpointName Inference endpoint name.
     * @param deploymentName The identifier for the Batch inference deployment.
     * @param body Batch inference deployment definition object.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the updated batch inference deployment.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    BatchDeploymentInner update(String resourceGroupName, String workspaceName, String endpointName,
        String deploymentName, PartialBatchDeploymentPartialMinimalTrackedResourceWithProperties body);
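
    // Illustrative usage sketch: the blocking overload returns the updated deployment directly (reusing the
    // 'patch' payload from the sketch above).
    //
    //     BatchDeploymentInner updated =
    //         client.update("my-rg", "my-workspace", "my-endpoint", "my-deployment", patch);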

    /**
     * Update a batch inference deployment (asynchronous).
     * 
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param workspaceName Name of Azure Machine Learning workspace.
     * @param endpointName Inference endpoint name.
     * @param deploymentName The identifier for the Batch inference deployment.
     * @param body Batch inference deployment definition object.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the updated batch inference deployment.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    BatchDeploymentInner update(String resourceGroupName, String workspaceName, String endpointName,
        String deploymentName, PartialBatchDeploymentPartialMinimalTrackedResourceWithProperties body, Context context);

    /**
     * Creates/updates a batch inference deployment (asynchronous).
     * 
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param workspaceName Name of Azure Machine Learning workspace.
     * @param endpointName Inference endpoint name.
     * @param deploymentName The identifier for the Batch inference deployment.
     * @param body Batch inference deployment definition object.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the {@link SyncPoller} for polling of long-running operation.
     */
    @ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION)
    SyncPoller<PollResult<BatchDeploymentInner>, BatchDeploymentInner> beginCreateOrUpdate(String resourceGroupName,
        String workspaceName, String endpointName, String deploymentName, BatchDeploymentInner body);
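
    // Illustrative usage sketch: create or replace a deployment and wait for provisioning to finish. The
    // BatchDeploymentInner construction is a placeholder; the required location and properties depend on
    // your scenario.
    //
    //     BatchDeploymentInner definition = new BatchDeploymentInner().withLocation("eastus");
    //     BatchDeploymentInner created = client
    //         .beginCreateOrUpdate("my-rg", "my-workspace", "my-endpoint", "my-deployment", definition)
    //         .getFinalResult();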

    /**
     * Creates/updates a batch inference deployment (asynchronous).
     * 
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param workspaceName Name of Azure Machine Learning workspace.
     * @param endpointName Inference endpoint name.
     * @param deploymentName The identifier for the Batch inference deployment.
     * @param body Batch inference deployment definition object.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the {@link SyncPoller} for polling of long-running operation.
     */
    @ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION)
    SyncPoller<PollResult<BatchDeploymentInner>, BatchDeploymentInner> beginCreateOrUpdate(String resourceGroupName,
        String workspaceName, String endpointName, String deploymentName, BatchDeploymentInner body, Context context);

    /**
     * Creates/updates a batch inference deployment (asynchronous).
     * 
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param workspaceName Name of Azure Machine Learning workspace.
     * @param endpointName Inference endpoint name.
     * @param deploymentName The identifier for the Batch inference deployment.
     * @param body Batch inference deployment definition object.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the created or updated batch inference deployment.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    BatchDeploymentInner createOrUpdate(String resourceGroupName, String workspaceName, String endpointName,
        String deploymentName, BatchDeploymentInner body);
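
    // Illustrative usage sketch: the blocking overload returns the provisioned deployment once the
    // operation has completed (reusing the 'definition' placeholder from the sketch above).
    //
    //     BatchDeploymentInner result =
    //         client.createOrUpdate("my-rg", "my-workspace", "my-endpoint", "my-deployment", definition);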

    /**
     * Creates/updates a batch inference deployment (asynchronous).
     * 
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param workspaceName Name of Azure Machine Learning workspace.
     * @param endpointName Inference endpoint name.
     * @param deploymentName The identifier for the Batch inference deployment.
     * @param body Batch inference deployment definition object.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the created or updated batch inference deployment.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    BatchDeploymentInner createOrUpdate(String resourceGroupName, String workspaceName, String endpointName,
        String deploymentName, BatchDeploymentInner body, Context context);
}