// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
package com.azure.ai.metricsadvisor;
import com.azure.ai.metricsadvisor.implementation.MetricsAdvisorImpl;
import com.azure.ai.metricsadvisor.implementation.models.AlertingResultQuery;
import com.azure.ai.metricsadvisor.implementation.models.AnomalyDimensionQuery;
import com.azure.ai.metricsadvisor.implementation.models.AnomalyFeedback;
import com.azure.ai.metricsadvisor.implementation.models.AnomalyFeedbackValue;
import com.azure.ai.metricsadvisor.implementation.models.ChangePointFeedback;
import com.azure.ai.metricsadvisor.implementation.models.ChangePointFeedbackValue;
import com.azure.ai.metricsadvisor.implementation.models.CommentFeedback;
import com.azure.ai.metricsadvisor.implementation.models.CommentFeedbackValue;
import com.azure.ai.metricsadvisor.implementation.models.DetectionAnomalyResultQuery;
import com.azure.ai.metricsadvisor.implementation.models.DetectionIncidentResultQuery;
import com.azure.ai.metricsadvisor.implementation.models.DetectionSeriesQuery;
import com.azure.ai.metricsadvisor.implementation.models.EnrichmentStatusQueryOption;
import com.azure.ai.metricsadvisor.implementation.models.FeedbackDimensionFilter;
import com.azure.ai.metricsadvisor.implementation.models.MetricDataQueryOptions;
import com.azure.ai.metricsadvisor.implementation.models.MetricDimensionQueryOptions;
import com.azure.ai.metricsadvisor.implementation.models.MetricFeedbackFilter;
import com.azure.ai.metricsadvisor.implementation.models.MetricSeriesQueryOptions;
import com.azure.ai.metricsadvisor.implementation.models.PeriodFeedback;
import com.azure.ai.metricsadvisor.implementation.models.PeriodFeedbackValue;
import com.azure.ai.metricsadvisor.implementation.models.SeriesIdentity;
import com.azure.ai.metricsadvisor.implementation.models.TimeMode;
import com.azure.ai.metricsadvisor.implementation.util.AnomalyTransforms;
import com.azure.ai.metricsadvisor.implementation.util.DetectionConfigurationTransforms;
import com.azure.ai.metricsadvisor.implementation.util.IncidentHelper;
import com.azure.ai.metricsadvisor.implementation.util.IncidentRootCauseTransforms;
import com.azure.ai.metricsadvisor.implementation.util.IncidentTransforms;
import com.azure.ai.metricsadvisor.implementation.util.MetricEnrichedSeriesDataTransformations;
import com.azure.ai.metricsadvisor.implementation.util.MetricFeedbackTransforms;
import com.azure.ai.metricsadvisor.implementation.util.MetricSeriesDataTransforms;
import com.azure.ai.metricsadvisor.implementation.util.MetricSeriesDefinitionTransforms;
import com.azure.ai.metricsadvisor.models.AnomalyAlert;
import com.azure.ai.metricsadvisor.models.AnomalyIncident;
import com.azure.ai.metricsadvisor.models.DataPointAnomaly;
import com.azure.ai.metricsadvisor.models.DimensionKey;
import com.azure.ai.metricsadvisor.models.EnrichmentStatus;
import com.azure.ai.metricsadvisor.models.IncidentRootCause;
import com.azure.ai.metricsadvisor.models.ListAlertOptions;
import com.azure.ai.metricsadvisor.models.ListAnomaliesAlertedOptions;
import com.azure.ai.metricsadvisor.models.ListAnomaliesDetectedOptions;
import com.azure.ai.metricsadvisor.models.ListAnomalyDimensionValuesOptions;
import com.azure.ai.metricsadvisor.models.ListIncidentsAlertedOptions;
import com.azure.ai.metricsadvisor.models.ListIncidentsDetectedOptions;
import com.azure.ai.metricsadvisor.models.ListMetricDimensionValuesOptions;
import com.azure.ai.metricsadvisor.models.ListMetricEnrichmentStatusOptions;
import com.azure.ai.metricsadvisor.models.ListMetricFeedbackOptions;
import com.azure.ai.metricsadvisor.models.ListMetricSeriesDefinitionOptions;
import com.azure.ai.metricsadvisor.models.MetricAnomalyFeedback;
import com.azure.ai.metricsadvisor.models.MetricChangePointFeedback;
import com.azure.ai.metricsadvisor.models.MetricCommentFeedback;
import com.azure.ai.metricsadvisor.models.MetricEnrichedSeriesData;
import com.azure.ai.metricsadvisor.models.MetricFeedback;
import com.azure.ai.metricsadvisor.models.MetricPeriodFeedback;
import com.azure.ai.metricsadvisor.models.MetricSeriesData;
import com.azure.ai.metricsadvisor.models.MetricSeriesDefinition;
import com.azure.ai.metricsadvisor.models.MetricsAdvisorResponseException;
import com.azure.core.annotation.ReturnType;
import com.azure.core.annotation.ServiceClient;
import com.azure.core.annotation.ServiceMethod;
import com.azure.core.http.HttpPipeline;
import com.azure.core.http.rest.PagedFlux;
import com.azure.core.http.rest.PagedResponse;
import com.azure.core.http.rest.PagedResponseBase;
import com.azure.core.http.rest.Response;
import com.azure.core.http.rest.SimpleResponse;
import com.azure.core.util.Context;
import com.azure.core.util.CoreUtils;
import com.azure.core.util.FluxUtil;
import com.azure.core.util.logging.ClientLogger;
import reactor.core.publisher.Mono;
import java.time.OffsetDateTime;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.UUID;
import java.util.stream.Collectors;
import static com.azure.ai.metricsadvisor.implementation.util.Utility.getEnrichmentStatusQueryOptions;
import static com.azure.ai.metricsadvisor.implementation.util.Utility.getListAnomaliesDetectedOptions;
import static com.azure.ai.metricsadvisor.implementation.util.Utility.getListAnomalyDimensionValuesOptions;
import static com.azure.ai.metricsadvisor.implementation.util.Utility.getListIncidentsDetectedOptions;
import static com.azure.ai.metricsadvisor.implementation.util.Utility.getMetricDataQueryOptions;
import static com.azure.ai.metricsadvisor.implementation.util.Utility.getMetricDimensionQueryOptions;
import static com.azure.ai.metricsadvisor.implementation.util.Utility.getMetricSeriesQueryOptions;
import static com.azure.ai.metricsadvisor.implementation.util.Utility.parseOperationId;
import static com.azure.ai.metricsadvisor.implementation.util.Utility.toStringOrNull;
import static com.azure.ai.metricsadvisor.implementation.util.Utility.validateActiveSinceInput;
import static com.azure.ai.metricsadvisor.implementation.util.Utility.validateAnomalyDimensionValuesInputs;
import static com.azure.ai.metricsadvisor.implementation.util.Utility.validateAnomalyIncidentRootCausesInputs;
import static com.azure.ai.metricsadvisor.implementation.util.Utility.validateIncidentsForDetectionConfigInputs;
import static com.azure.ai.metricsadvisor.implementation.util.Utility.validateListAlertsInputs;
import static com.azure.ai.metricsadvisor.implementation.util.Utility.validateListAnomaliesInputs;
import static com.azure.ai.metricsadvisor.implementation.util.Utility.validateMetricEnrichedSeriesInputs;
import static com.azure.ai.metricsadvisor.implementation.util.Utility.validateMetricEnrichmentStatusInputs;
import static com.azure.ai.metricsadvisor.implementation.util.Utility.validateMetricSeriesInputs;
import static com.azure.ai.metricsadvisor.implementation.util.Utility.validateStartEndTime;
import static com.azure.core.util.FluxUtil.monoError;
import static com.azure.core.util.FluxUtil.withContext;
/**
* This class provides an asynchronous client to connect to the Metrics Advisor Azure Cognitive Service.
* This client provides asynchronous methods to perform:
*
* Analyze root causes within a specific dimension using the
* {@link com.azure.ai.metricsadvisor.MetricsAdvisorAsyncClient#listIncidentRootCauses(String, String)}
* method with your respective data source.
* Fetch incidents triggered for a particular detection configuration using the
* {@link com.azure.ai.metricsadvisor.MetricsAdvisorAsyncClient#listIncidentsForDetectionConfig(String, OffsetDateTime, OffsetDateTime)}
* method.
* Fetch all the anomalies detected for a particular detection configuration using the
* {@link com.azure.ai.metricsadvisor.MetricsAdvisorAsyncClient#listAnomaliesForDetectionConfig(String, OffsetDateTime, OffsetDateTime)}
* method.
*
*
* Service clients are the point of interaction for developers to use Azure Metrics Advisor.
* {@link com.azure.ai.metricsadvisor.MetricsAdvisorClient} is the synchronous service client and
* {@link com.azure.ai.metricsadvisor.MetricsAdvisorAsyncClient} is the asynchronous service client.
* The examples shown in this document use a credential object named DefaultAzureCredential for authentication, which is
* appropriate for most scenarios, including local development and production environments. Additionally, we
* recommend using
* managed identity
* for authentication in production environments.
* You can find more information on different ways of authenticating and their corresponding credential types in the
* Azure Identity documentation.
*
*
* Sample: Construct a MetricsAdvisorAsyncClient with DefaultAzureCredential
*
* The following code sample demonstrates the creation of a
* {@link com.azure.ai.metricsadvisor.MetricsAdvisorAsyncClient}, using the {@code DefaultAzureCredentialBuilder} to configure it.
*
*
*
* MetricsAdvisorAsyncClient metricsAdvisorAsyncClient =
* new MetricsAdvisorClientBuilder()
* .credential(new DefaultAzureCredentialBuilder().build())
* .endpoint("{endpoint}")
* .buildAsyncClient();
*
*
*
* Further, see the code sample below to use
* {@link com.azure.ai.metricsadvisor.models.MetricsAdvisorKeyCredential MetricsAdvisorKeyCredential} for client creation.
*
*
*
* MetricsAdvisorAsyncClient metricsAdvisorAsyncClient =
* new MetricsAdvisorClientBuilder()
* .credential(new MetricsAdvisorKeyCredential("{subscription_key}", "{api_key}"))
* .endpoint("{endpoint}")
* .buildAsyncClient();
*
*
*
* @see com.azure.ai.metricsadvisor
* @see MetricsAdvisorClientBuilder
* @see MetricsAdvisorClient
*/
@ServiceClient(builder = MetricsAdvisorClientBuilder.class, isAsync = true)
public final class MetricsAdvisorAsyncClient {
final ClientLogger logger = new ClientLogger(MetricsAdvisorAsyncClient.class);
private final MetricsAdvisorImpl service;
/**
* Create a {@link MetricsAdvisorAsyncClient} that sends requests to the Metrics Advisor
* service's endpoint. Each service call goes through the
* {@link MetricsAdvisorClientBuilder#pipeline(HttpPipeline) http pipeline}.
*
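* The following is a minimal sketch (illustrative only, not an official Metrics Advisor sample) of
* supplying a pre-built {@link HttpPipeline} through the builder; the single {@code RetryPolicy} is a
* placeholder for whatever policies the pipeline actually needs:
*
* HttpPipeline pipeline = new HttpPipelineBuilder()
*     .policies(new RetryPolicy())
*     .build();
* MetricsAdvisorAsyncClient client = new MetricsAdvisorClientBuilder()
*     .pipeline(pipeline)
*     .endpoint("{endpoint}")
*     .credential(new MetricsAdvisorKeyCredential("{subscription_key}", "{api_key}"))
*     .buildAsyncClient();
*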
* @param service The proxy service used to perform REST calls.
* @param serviceVersion The versions of Azure Metrics Advisor supported by this client library.
*/
MetricsAdvisorAsyncClient(MetricsAdvisorImpl service, MetricsAdvisorServiceVersion serviceVersion) {
this.service = service;
}
/**
* List series definitions for a metric.
*
* Code sample
*
*
* String metricId = "b460abfc-7a58-47d7-9d99-21ee21fdfc6e";
* final OffsetDateTime activeSince = OffsetDateTime.parse("2020-07-10T00:00:00Z");
*
* metricsAdvisorAsyncClient.listMetricSeriesDefinitions(metricId, activeSince)
* .subscribe(metricSeriesDefinition -> {
* System.out.printf("Data Feed Metric id for the retrieved series definition : %s%n",
* metricSeriesDefinition.getMetricId());
* System.out.printf("Series Key:");
* System.out.println(metricSeriesDefinition.getSeriesKey().asMap());
* });
*
*
*
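* Note that the returned {@link PagedFlux} is lazy; validation failures (for example a malformed
* {@code metricId}) and service rejections typically surface through the error signal of the
* subscription rather than from the call itself. A minimal sketch (illustrative only) of handling them:
*
* metricsAdvisorAsyncClient.listMetricSeriesDefinitions(metricId, activeSince)
*     .subscribe(
*         definition -> System.out.println(definition.getMetricId()),
*         error -> {
*             if (error instanceof MetricsAdvisorResponseException) {
*                 System.out.printf("Service rejected the request: %s%n", error.getMessage());
*             }
*         });
*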
* @param metricId metric unique id.
* @param activeSince the start time for querying series ingested after this time.
*
* @return A {@link PagedFlux} of the {@link MetricSeriesDefinition metric series definitions}.
* @throws IllegalArgumentException thrown if {@code metricId} fails the UUID format validation.
* @throws MetricsAdvisorResponseException thrown if the request is rejected by server.
* @throws NullPointerException thrown if the {@code metricId} or {@code activeSince}
* is null.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedFlux<MetricSeriesDefinition> listMetricSeriesDefinitions(String metricId, OffsetDateTime activeSince) {
return listMetricSeriesDefinitions(metricId, activeSince, null);
}
/**
* List series definitions for a metric.
*
* Code sample
*
*
* String metricId = "b460abfc-7a58-47d7-9d99-21ee21fdfc6e";
* final OffsetDateTime activeSince = OffsetDateTime.parse("2020-07-10T00:00:00Z");
* final ListMetricSeriesDefinitionOptions options
* = new ListMetricSeriesDefinitionOptions()
* .setMaxPageSize(10)
* .setDimensionCombinationToFilter(new HashMap<String, List<String>>() {{
* put("Dim2", Collections.singletonList("Angelfish"));
* }});
*
* metricsAdvisorAsyncClient.listMetricSeriesDefinitions(metricId, activeSince, options)
* .subscribe(metricSeriesDefinition -> {
* System.out.printf("Data Feed Metric id for the retrieved series definition : %s%n",
* metricSeriesDefinition.getMetricId());
* System.out.printf("Series Key:");
* System.out.println(metricSeriesDefinition.getSeriesKey().asMap());
* });
*
*
*
* @param metricId metric unique id.
* @param activeSince the start time for querying series ingested after this time.
* @param options the additional filtering attributes that can be provided to query the series.
*
* @return A {@link PagedFlux} of the {@link MetricSeriesDefinition metric series definitions}.
* @throws IllegalArgumentException thrown if {@code metricId} fails the UUID format validation.
* @throws MetricsAdvisorResponseException thrown if the request is rejected by server.
* @throws NullPointerException thrown if the {@code metricId} or {@code activeSince}
* is null.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedFlux<MetricSeriesDefinition> listMetricSeriesDefinitions(String metricId, OffsetDateTime activeSince,
ListMetricSeriesDefinitionOptions options) {
try {
return new PagedFlux<>(
() -> withContext(
context -> listMetricSeriesDefinitionSinglePageAsync(metricId, activeSince, options, context)),
continuationToken -> withContext(context -> listMetricSeriesDefinitionNextPageAsync(continuationToken,
activeSince, options, context)));
} catch (RuntimeException ex) {
return new PagedFlux<>(() -> monoError(logger, ex));
}
}
private Mono<PagedResponse<MetricSeriesDefinition>> listMetricSeriesDefinitionSinglePageAsync(String metricId,
OffsetDateTime activeSince, ListMetricSeriesDefinitionOptions options, Context context) {
validateActiveSinceInput(activeSince, logger);
if (options == null) {
options = new ListMetricSeriesDefinitionOptions();
}
final MetricSeriesQueryOptions metricSeriesQueryOptions = getMetricSeriesQueryOptions(activeSince, options);
return service
.getMetricSeriesSinglePageAsync(UUID.fromString(metricId), metricSeriesQueryOptions, options.getSkip(),
options.getMaxPageSize(), context)
.doOnRequest(ignoredValue -> logger.info("Listing information metric series definitions"))
.doOnSuccess(response -> logger.info("Listed metric series definitions - {}", response))
.doOnError(error -> logger.warning("Failed to list metric series definitions information - {}", error))
.map(MetricSeriesDefinitionTransforms::fromInnerResponse);
}
private Mono<PagedResponse<MetricSeriesDefinition>> listMetricSeriesDefinitionNextPageAsync(String nextPageLink,
OffsetDateTime activeSince, ListMetricSeriesDefinitionOptions options, Context context) {
if (CoreUtils.isNullOrEmpty(nextPageLink)) {
return Mono.empty();
}
validateActiveSinceInput(activeSince, logger);
if (options == null) {
options = new ListMetricSeriesDefinitionOptions();
}
final MetricSeriesQueryOptions metricSeriesQueryOptions = getMetricSeriesQueryOptions(activeSince, options);
return service.getMetricSeriesNextSinglePageAsync(nextPageLink, metricSeriesQueryOptions, context)
.doOnSubscribe(ignoredValue -> logger.info("Retrieving the next listing page - Page {}", nextPageLink))
.doOnSuccess(response -> logger.info("Retrieved the next listing page - Page {}", nextPageLink))
.doOnError(
error -> logger.warning("Failed to retrieve the next listing page - Page {}", nextPageLink, error))
.map(MetricSeriesDefinitionTransforms::fromInnerResponse);
}
/**
* Get time series data for a metric.
*
* Code sample
*
*
* final String metricId = "2dgfbbbb-41ec-a637-677e77b81455";
* final OffsetDateTime startTime = OffsetDateTime.parse("2020-09-09T00:00:00Z");
* final OffsetDateTime endTime = OffsetDateTime.parse("2020-09-09T12:00:00Z");
*
* final List<DimensionKey> seriesKeyFilter
* = Arrays.asList(new DimensionKey().put("cost", "redmond"));
*
* metricsAdvisorAsyncClient.listMetricSeriesData(metricId, seriesKeyFilter, startTime, endTime)
* .subscribe(metricSeriesData -> {
* System.out.println("List of data points for this series:");
* System.out.println(metricSeriesData.getMetricValues());
* System.out.println("Timestamps of the data related to this time series:");
* System.out.println(metricSeriesData.getTimestamps());
* System.out.printf("Series Key:");
* System.out.println(metricSeriesData.getSeriesKey().asMap());
* });
*
*
*
* @param metricId metric unique id.
* @param seriesKeys the series key to filter.
* This enables additional filtering of dimension values being queried.
* For example, if we have the dimensions 'category' and 'city',
* the API can query values of the dimension 'category' with the series key 'city=redmond'.
*
* @param startTime The start time for querying the time series data.
* @param endTime The end time for querying the time series data.
*
* @return A {@link PagedFlux} of the {@link MetricSeriesData metric series data points}.
* @throws IllegalArgumentException thrown if {@code metricId} fails the UUID format validation.
* @throws MetricsAdvisorResponseException thrown if the request is rejected by server.
* @throws NullPointerException thrown if the {@code metricId}, {@code startTime} or {@code endTime}
* is null.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedFlux<MetricSeriesData> listMetricSeriesData(String metricId, List<DimensionKey> seriesKeys,
OffsetDateTime startTime, OffsetDateTime endTime) {
try {
return new PagedFlux<>(() -> withContext(
context -> listMetricSeriesDataInternal(metricId, seriesKeys, startTime, endTime, context)), null);
} catch (RuntimeException ex) {
return new PagedFlux<>(() -> monoError(logger, ex));
}
}
private Mono<PagedResponse<MetricSeriesData>> listMetricSeriesDataInternal(String metricId,
List<DimensionKey> seriesKeys, OffsetDateTime startTime, OffsetDateTime endTime, Context context) {
validateMetricSeriesInputs(metricId, seriesKeys, startTime, endTime, logger);
List<Map<String, String>> dimensionList
= seriesKeys.stream().map(DimensionKey::asMap).collect(Collectors.toList());
final MetricDataQueryOptions metricDataQueryOptions = getMetricDataQueryOptions(startTime, dimensionList);
return service.getMetricDataWithResponseAsync(UUID.fromString(metricId), metricDataQueryOptions, context)
.map(MetricSeriesDataTransforms::fromInnerResponse);
}
/**
* List dimension values for a given metric.
*
* Code sample
*
*
*
* metricsAdvisorAsyncClient.listMetricDimensionValues("metricId", "dimension1")
* .subscribe(System.out::println);
*
*
*
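* In a non-reactive caller the values can also be collected and blocked on (blocking should be avoided
* on reactor threads); a minimal sketch, not an official sample:
*
* List<String> dimensionValues = metricsAdvisorAsyncClient
*     .listMetricDimensionValues("metricId", "dimension1")
*     .collectList()
*     .block();
*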
* @param metricId metric unique id.
* @param dimensionName the query dimension name.
*
* @return the {@link PagedFlux} of the dimension values for that metric.
* @throws IllegalArgumentException thrown if {@code metricId} fails the UUID format validation.
* @throws MetricsAdvisorResponseException thrown if the request is rejected by server.
* @throws NullPointerException thrown if the {@code metricId} or {@code dimensionName} is null.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedFlux<String> listMetricDimensionValues(final String metricId, final String dimensionName) {
return listMetricDimensionValues(metricId, dimensionName, null);
}
/**
* List dimension values for a given metric.
*
* Code sample
*
*
* metricsAdvisorAsyncClient.listMetricDimensionValues("metricId", "dimension1",
* new ListMetricDimensionValuesOptions().setDimensionValueToFilter("value1").setMaxPageSize(3))
* .subscribe(System.out::println);
*
*
*
* @param metricId metric unique id.
* @param dimensionName the query dimension name.
* @param options the additional filtering parameters to specify while querying.
*
* @return the {@link PagedFlux} of the dimension values for that metric.
* @throws IllegalArgumentException thrown if {@code metricId} fails the UUID format validation.
* @throws MetricsAdvisorResponseException thrown if the request is rejected by server.
* @throws NullPointerException thrown if the {@code metricId} or {@code dimensionName} is null.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedFlux<String> listMetricDimensionValues(final String metricId, final String dimensionName,
final ListMetricDimensionValuesOptions options) {
try {
return new PagedFlux<>(
() -> withContext(
context -> listMetricDimensionValuesSinglePageAsync(metricId, dimensionName, options, context)),
continuationToken -> withContext(context -> listMetricDimensionValuesNextPageAsync(continuationToken,
dimensionName, options, context)));
} catch (RuntimeException ex) {
return new PagedFlux<>(() -> monoError(logger, ex));
}
}
private Mono<PagedResponse<String>> listMetricDimensionValuesSinglePageAsync(String metricId, String dimensionName,
ListMetricDimensionValuesOptions options, Context context) {
Objects.requireNonNull(metricId, "'metricId' cannot be null.");
Objects.requireNonNull(dimensionName, "'dimensionName' cannot be null.");
if (options == null) {
options = new ListMetricDimensionValuesOptions();
}
final MetricDimensionQueryOptions metricDimensionQueryOptions
= getMetricDimensionQueryOptions(dimensionName, options);
return service
.getMetricDimensionSinglePageAsync(UUID.fromString(metricId), metricDimensionQueryOptions,
options.getSkip(), options.getMaxPageSize(), context)
.doOnRequest(ignoredValue -> logger.info("Listing all dimension values for a metric"))
.doOnSuccess(response -> logger.info("Listed all dimension values for a metric"))
.doOnError(error -> logger.warning("Failed to list all dimension values for a metric information", error))
.map(res -> new PagedResponseBase<>(res.getRequest(), res.getStatusCode(), res.getHeaders(), res.getValue(),
res.getContinuationToken(), null));
}
private Mono<PagedResponse<String>> listMetricDimensionValuesNextPageAsync(String nextPageLink,
String dimensionName, ListMetricDimensionValuesOptions options, Context context) {
if (CoreUtils.isNullOrEmpty(nextPageLink)) {
return Mono.empty();
}
if (options == null) {
options = new ListMetricDimensionValuesOptions();
}
final MetricDimensionQueryOptions metricDimensionQueryOptions
= getMetricDimensionQueryOptions(dimensionName, options);
return service.getMetricDimensionNextSinglePageAsync(nextPageLink, metricDimensionQueryOptions, context)
.doOnSubscribe(ignoredValue -> logger.info("Retrieving the next listing page - Page {}", nextPageLink))
.doOnSuccess(response -> logger.info("Retrieved the next listing page - Page {}", nextPageLink))
.doOnError(
error -> logger.warning("Failed to retrieve the next listing page - Page {}", nextPageLink, error))
.map(res -> new PagedResponseBase<>(res.getRequest(), res.getStatusCode(), res.getHeaders(), res.getValue(),
res.getContinuationToken(), null));
}
/**
* List the enrichment status for a metric.
*
* Code sample
*
*
* final OffsetDateTime startTime = OffsetDateTime.parse("2020-01-01T00:00:00Z");
* final OffsetDateTime endTime = OffsetDateTime.parse("2020-09-09T00:00:00Z");
* final String metricId = "d3gh4i4-b804-4ab9-a70f-0da0c89cft3l";
*
* metricsAdvisorAsyncClient.listMetricEnrichmentStatus(metricId, startTime, endTime)
* .subscribe(enrichmentStatus -> {
* System.out.printf("Data Feed Metric enrichment status : %s%n", enrichmentStatus.getStatus());
* System.out.printf("Data Feed Metric enrichment status message: %s%n", enrichmentStatus.getMessage());
* System.out.printf("Data Feed Metric enrichment status data slice timestamp : %s%n",
* enrichmentStatus.getTimestamp());
* });
*
*
*
* @param metricId metric unique id.
* @param startTime The start time for querying the time series data.
* @param endTime The end time for querying the time series data.
*
* @return the list of enrichment statuses for the specified metric.
* @throws IllegalArgumentException thrown if {@code metricId} fails the UUID format validation.
* @throws MetricsAdvisorResponseException thrown if the request is rejected by server.
* @throws NullPointerException thrown if {@code metricId}, {@code startTime} or {@code endTime}
* is null.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedFlux<EnrichmentStatus> listMetricEnrichmentStatus(String metricId, OffsetDateTime startTime,
OffsetDateTime endTime) {
return listMetricEnrichmentStatus(metricId, startTime, endTime, null);
}
/**
* List the enrichment status for a metric.
*
* Code sample
*
*
* final OffsetDateTime startTime = OffsetDateTime.parse("2020-01-01T00:00:00Z");
* final OffsetDateTime endTime = OffsetDateTime.parse("2020-09-09T00:00:00Z");
* final String metricId = "d3gh4i4-b804-4ab9-a70f-0da0c89cft3l";
* final ListMetricEnrichmentStatusOptions options = new ListMetricEnrichmentStatusOptions().setMaxPageSize(10);
*
* metricsAdvisorAsyncClient.listMetricEnrichmentStatus(metricId, startTime, endTime, options)
* .subscribe(enrichmentStatus -> {
* System.out.printf("Data Feed Metric enrichment status : %s%n", enrichmentStatus.getStatus());
* System.out.printf("Data Feed Metric enrichment status message: %s%n", enrichmentStatus.getMessage());
* System.out.printf("Data Feed Metric enrichment status data slice timestamp : %s%n",
* enrichmentStatus.getTimestamp());
* });
*
*
*
* @param metricId metric unique id.
* @param startTime The start time for querying the time series data.
* @param endTime The end time for querying the time series data.
* @param options the additional configurable options to specify when querying the result.
*
* @return the list of enrichment statuses for the specified metric.
* @throws IllegalArgumentException thrown if {@code metricId} fails the UUID format validation.
* @throws MetricsAdvisorResponseException thrown if the request is rejected by server.
* @throws NullPointerException thrown if {@code metricId}, {@code startTime} or {@code endTime}
* is null.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedFlux<EnrichmentStatus> listMetricEnrichmentStatus(String metricId, OffsetDateTime startTime,
OffsetDateTime endTime, ListMetricEnrichmentStatusOptions options) {
try {
return new PagedFlux<>(() -> withContext(
context -> listMetricEnrichmentStatusSinglePageAsync(metricId, startTime, endTime, options, context)),
continuationToken -> withContext(context -> listMetricEnrichmentStatusNextPageAsync(continuationToken,
startTime, endTime, context)));
} catch (RuntimeException ex) {
return new PagedFlux<>(() -> monoError(logger, ex));
}
}
private Mono<PagedResponse<EnrichmentStatus>> listMetricEnrichmentStatusSinglePageAsync(String metricId,
OffsetDateTime startTime, OffsetDateTime endTime, ListMetricEnrichmentStatusOptions options, Context context) {
validateMetricEnrichmentStatusInputs(metricId, "'metricId' is required.", startTime, endTime);
if (options == null) {
options = new ListMetricEnrichmentStatusOptions();
}
final EnrichmentStatusQueryOption enrichmentStatusQueryOption
= getEnrichmentStatusQueryOptions(startTime, endTime);
return service
.getEnrichmentStatusByMetricSinglePageAsync(UUID.fromString(metricId), enrichmentStatusQueryOption,
options.getSkip(), options.getMaxPageSize(), context)
.doOnRequest(ignoredValue -> logger.info("Listing all metric enrichment status values for a metric"))
.doOnSuccess(
response -> logger.info("Listed all metric enrichment status values for a metric - {}", response))
.doOnError(
error -> logger.warning("Failed to list all metric enrichment values for a metric information", error))
.map(res -> new PagedResponseBase<>(res.getRequest(), res.getStatusCode(), res.getHeaders(), res.getValue(),
res.getContinuationToken(), null));
}
private Mono<PagedResponse<EnrichmentStatus>> listMetricEnrichmentStatusNextPageAsync(String nextPageLink,
OffsetDateTime startTime, OffsetDateTime endTime, Context context) {
if (CoreUtils.isNullOrEmpty(nextPageLink)) {
return Mono.empty();
}
validateStartEndTime(startTime, endTime);
final EnrichmentStatusQueryOption enrichmentStatusQueryOption
= getEnrichmentStatusQueryOptions(startTime, endTime);
return service
.getEnrichmentStatusByMetricNextSinglePageAsync(nextPageLink, enrichmentStatusQueryOption, context)
.doOnSubscribe(ignoredValue -> logger.info("Retrieving the next listing page - Page {}", nextPageLink))
.doOnSuccess(response -> logger.info("Retrieved the next listing page - Page {}", nextPageLink))
.doOnError(
error -> logger.warning("Failed to retrieve the next listing page - Page {}", nextPageLink, error))
.map(res -> new PagedResponseBase<>(res.getRequest(), res.getStatusCode(), res.getHeaders(), res.getValue(),
res.getContinuationToken(), null));
}
/**
* Given a list of time series keys, retrieve the time series enriched using
* a detection configuration.
*
* Code sample
*
*
* final String detectionConfigurationId = "e87d899d-a5a0-4259-b752-11aea34d5e34";
* final DimensionKey seriesKey = new DimensionKey()
* .put("Dim1", "Common Lime")
* .put("Dim2", "Antelope");
* final OffsetDateTime startTime = OffsetDateTime.parse("2020-08-12T00:00:00Z");
* final OffsetDateTime endTime = OffsetDateTime.parse("2020-09-12T00:00:00Z");
*
* PagedFlux<MetricEnrichedSeriesData> enrichedDataFlux
* = metricsAdvisorAsyncClient.listMetricEnrichedSeriesData(detectionConfigurationId,
* Arrays.asList(seriesKey),
* startTime,
* endTime);
*
* enrichedDataFlux.subscribe(enrichedData -> {
* System.out.printf("Series Key %s%n:", enrichedData.getSeriesKey().asMap());
* System.out.println("List of data points for this series");
* System.out.println(enrichedData.getMetricValues());
* System.out.println("Timestamps of the data related to this time series:");
* System.out.println(enrichedData.getTimestamps());
* System.out.println("The expected values of the data points calculated by the smart detector:");
* System.out.println(enrichedData.getExpectedMetricValues());
* System.out.println("The lower boundary values of the data points calculated by smart detector:");
* System.out.println(enrichedData.getLowerBoundaryValues());
* System.out.println("the periods calculated for the data points in the time series:");
* System.out.println(enrichedData.getPeriods());
* });
*
*
*
* @param detectionConfigurationId The id of the configuration used to enrich the time series
* identified by the keys in {@code seriesKeys}.
* @param seriesKeys The time series key list, each key identifies a specific time series.
* @param startTime The start time of the time range within which the enriched data is returned.
* @param endTime The end time of the time range within which the enriched data is returned.
* @return The enriched time series.
* @throws IllegalArgumentException thrown if {@code detectionConfigurationId} fails the UUID format validation
* or if {@code seriesKeys} is empty.
* @throws NullPointerException thrown if the {@code detectionConfigurationId}
* or {@code startTime} or {@code endTime} is null.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedFlux<MetricEnrichedSeriesData> listMetricEnrichedSeriesData(String detectionConfigurationId,
List<DimensionKey> seriesKeys, OffsetDateTime startTime, OffsetDateTime endTime) {
try {
return new PagedFlux<>(
() -> withContext(context -> listMetricEnrichedSeriesDataInternal(detectionConfigurationId, seriesKeys,
startTime, endTime, context)),
null);
} catch (RuntimeException e) {
return new PagedFlux<>(() -> monoError(logger, e));
}
}
private Mono<PagedResponse<MetricEnrichedSeriesData>> listMetricEnrichedSeriesDataInternal(
String detectionConfigurationId, List<DimensionKey> seriesKeys, OffsetDateTime startTime,
OffsetDateTime endTime, Context context) {
validateMetricEnrichedSeriesInputs(detectionConfigurationId, seriesKeys, startTime, endTime, logger);
final List<SeriesIdentity> innerSeriesKeys = seriesKeys.stream()
.map(seriesId -> new SeriesIdentity().setDimension(seriesId.asMap()))
.collect(Collectors.toList());
DetectionSeriesQuery query
= new DetectionSeriesQuery().setSeries(innerSeriesKeys).setStartTime(startTime).setEndTime(endTime);
return service
.getSeriesByAnomalyDetectionConfigurationWithResponseAsync(UUID.fromString(detectionConfigurationId), query,
context)
.doOnSubscribe(ignoredValue -> logger.info("Retrieving the EnrichedSeries"))
.doOnSuccess(response -> logger.info("Retrieved the EnrichedSeries {}", response))
.doOnError(error -> logger.warning("Failed to retrieve EnrichedSeries", error))
.map(res -> new PagedResponseBase<>(res.getRequest(), res.getStatusCode(), res.getHeaders(),
MetricEnrichedSeriesDataTransformations.fromInnerList(res.getValue()), null, null));
}
/**
* Fetch the anomalies identified by an anomaly detection configuration.
*
* Code sample
*
*
* final String detectionConfigurationId = "c0f2539f-b804-4ab9-a70f-0da0c89c76d8";
* final OffsetDateTime startTime = OffsetDateTime.parse("2020-09-09T00:00:00Z");
* final OffsetDateTime endTime = OffsetDateTime.parse("2020-09-09T12:00:00Z");
*
* metricsAdvisorAsyncClient.listAnomaliesForDetectionConfig(detectionConfigurationId,
* startTime, endTime)
* .subscribe(anomaly -> {
* System.out.printf("DataPoint Anomaly AnomalySeverity: %s%n", anomaly.getSeverity());
* System.out.printf("Series Key:");
* DimensionKey seriesKey = anomaly.getSeriesKey();
* for (Map.Entry<String, String> dimension : seriesKey.asMap().entrySet()) {
* System.out.printf("DimensionName: %s DimensionValue:%s%n",
* dimension.getKey(), dimension.getValue());
* }
* });
*
*
*
* @param detectionConfigurationId The anomaly detection configuration id.
* @param startTime The start time of the time range within which the anomalies were detected.
* @param endTime The end time of the time range within which the anomalies were detected.
*
* @return The anomalies.
* @throws IllegalArgumentException thrown if {@code detectionConfigurationId} does not conform
* to the UUID format specification.
* @throws NullPointerException thrown if the {@code detectionConfigurationId}
* or {@code startTime} or {@code endTime} is null.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedFlux<DataPointAnomaly> listAnomaliesForDetectionConfig(String detectionConfigurationId,
OffsetDateTime startTime, OffsetDateTime endTime) {
return listAnomaliesForDetectionConfig(detectionConfigurationId, startTime, endTime, null);
}
/**
* Fetch the anomalies identified by an anomaly detection configuration.
*
* Code sample
*
*
* final String detectionConfigurationId = "c0f2539f-b804-4ab9-a70f-0da0c89c76d8";
* final OffsetDateTime startTime = OffsetDateTime.parse("2020-09-09T00:00:00Z");
* final OffsetDateTime endTime = OffsetDateTime.parse("2020-09-09T12:00:00Z");
* final ListAnomaliesDetectedFilter filter = new ListAnomaliesDetectedFilter()
* .setSeverityRange(AnomalySeverity.LOW, AnomalySeverity.MEDIUM);
* final ListAnomaliesDetectedOptions options = new ListAnomaliesDetectedOptions()
* .setMaxPageSize(10)
* .setFilter(filter);
* metricsAdvisorAsyncClient.listAnomaliesForDetectionConfig(detectionConfigurationId,
* startTime, endTime, options)
* .subscribe(anomaly -> {
* System.out.printf("DataPoint Anomaly AnomalySeverity: %s%n", anomaly.getSeverity());
* System.out.printf("Series Key:");
* DimensionKey seriesKey = anomaly.getSeriesKey();
* for (Map.Entry<String, String> dimension : seriesKey.asMap().entrySet()) {
* System.out.printf("DimensionName: %s DimensionValue:%s%n",
* dimension.getKey(), dimension.getValue());
* }
* });
*
*
*
* @param detectionConfigurationId The anomaly detection configuration id.
* @param startTime The start time of the time range within which the anomalies were detected.
* @param endTime The end time of the time range within which the anomalies were detected.
* @param options The additional parameters.
*
* @return The anomalies.
* @throws IllegalArgumentException thrown if {@code detectionConfigurationId} does not conform
* to the UUID format specification
* or {@code options.filter} is used to set severity but either min or max severity is missing.
* @throws NullPointerException thrown if the {@code detectionConfigurationId} or {@code options}
* or {@code startTime} or {@code endTime} is null.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedFlux<DataPointAnomaly> listAnomaliesForDetectionConfig(String detectionConfigurationId,
OffsetDateTime startTime, OffsetDateTime endTime, ListAnomaliesDetectedOptions options) {
try {
return new PagedFlux<>(
() -> withContext(context -> listAnomaliesForDetectionConfigSinglePageAsync(detectionConfigurationId,
startTime, endTime, options, context)),
continuationToken -> withContext(context -> listAnomaliesForDetectionConfigNextPageAsync(
continuationToken, startTime, endTime, options, context)));
} catch (RuntimeException ex) {
return new PagedFlux<>(() -> FluxUtil.monoError(logger, ex));
}
}
private Mono<PagedResponse<DataPointAnomaly>> listAnomaliesForDetectionConfigSinglePageAsync(
String detectionConfigurationId, OffsetDateTime startTime, OffsetDateTime endTime,
ListAnomaliesDetectedOptions options, Context context) {
Objects.requireNonNull(detectionConfigurationId, "'detectionConfigurationId' is required.");
Objects.requireNonNull(startTime, "'startTime' is required.");
Objects.requireNonNull(endTime, "'endTime' is required.");
DetectionAnomalyResultQuery query
= new DetectionAnomalyResultQuery().setStartTime(startTime).setEndTime(endTime);
options = getListAnomaliesDetectedOptions(options, query, logger);
return service
.getAnomaliesByAnomalyDetectionConfigurationSinglePageAsync(UUID.fromString(detectionConfigurationId),
query, options.getSkip(), options.getMaxPageSize(), context)
.doOnRequest(ignoredValue -> logger.info("Listing anomalies detected"))
.doOnSuccess(response -> logger.info("Listed anomalies {}", response))
.doOnError(error -> logger.warning("Failed to list the anomalies detected", error))
.map(AnomalyTransforms::fromInnerPagedResponse);
}
private Mono<PagedResponse<DataPointAnomaly>> listAnomaliesForDetectionConfigNextPageAsync(String nextPageLink,
OffsetDateTime startTime, OffsetDateTime endTime, ListAnomaliesDetectedOptions options, Context context) {
if (CoreUtils.isNullOrEmpty(nextPageLink)) {
return Mono.empty();
}
DetectionAnomalyResultQuery query
= new DetectionAnomalyResultQuery().setStartTime(startTime).setEndTime(endTime);
getListAnomaliesDetectedOptions(options, query, logger);
return service.getAnomaliesByAnomalyDetectionConfigurationNextSinglePageAsync(nextPageLink, query, context)
.doOnSubscribe(ignoredValue -> logger.info("Retrieving the next listing page - Page {}", nextPageLink))
.doOnSuccess(
response -> logger.info("Retrieved the next listing page - Page {} {}", nextPageLink, response))
.doOnError(
error -> logger.warning("Failed to retrieve the next listing page - Page {}", nextPageLink, error))
.map(AnomalyTransforms::fromInnerPagedResponse);
}
/**
* Fetch the incidents identified by an anomaly detection configuration.
*
* Code sample
*
*
* final String detectionConfigurationId = "c0f2539f-b804-4ab9-a70f-0da0c89c76d8";
* final OffsetDateTime startTime = OffsetDateTime.parse("2020-09-09T00:00:00Z");
* final OffsetDateTime endTime = OffsetDateTime.parse("2020-09-09T12:00:00Z");
*
* PagedFlux<AnomalyIncident> incidentsFlux
* = metricsAdvisorAsyncClient.listIncidentsForDetectionConfig(detectionConfigurationId, startTime,
* endTime);
*
* incidentsFlux.subscribe(incident -> {
* System.out.printf("Data Feed Metric Id: %s%n", incident.getMetricId());
* System.out.printf("Detection Configuration Id: %s%n", incident.getDetectionConfigurationId());
* System.out.printf("Anomaly Incident Id: %s%n", incident.getId());
* System.out.printf("Anomaly Incident Start Time: %s%n", incident.getStartTime());
* System.out.printf("Anomaly Incident AnomalySeverity: %s%n", incident.getSeverity());
* System.out.printf("Anomaly Incident Status: %s%n", incident.getStatus());
* System.out.printf("Root DataFeedDimension Key: %s%n", incident.getRootDimensionKey().asMap());
* });
*
*
*
* @param detectionConfigurationId The anomaly detection configuration id.
* @param startTime The start time of the time range within which the incidents were detected.
* @param endTime The end time of the time range within which the incidents were detected.
* @return The incidents.
* @throws IllegalArgumentException thrown if {@code detectionConfigurationId} does not conform
* to the UUID format specification.
* @throws NullPointerException thrown if the {@code detectionConfigurationId}
* or {@code startTime} or {@code endTime} is null.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedFlux<AnomalyIncident> listIncidentsForDetectionConfig(String detectionConfigurationId,
OffsetDateTime startTime, OffsetDateTime endTime) {
return listIncidentsForDetectionConfig(detectionConfigurationId, startTime, endTime, null);
}
/**
* Fetch the incidents identified by an anomaly detection configuration.
*
* Code sample
*
*
* final String detectionConfigurationId = "c0f2539f-b804-4ab9-a70f-0da0c89c76d8";
* final OffsetDateTime startTime = OffsetDateTime.parse("2020-09-09T00:00:00Z");
* final OffsetDateTime endTime = OffsetDateTime.parse("2020-09-09T12:00:00Z");
* final ListIncidentsDetectedOptions options = new ListIncidentsDetectedOptions()
* .setMaxPageSize(1000);
*
* PagedFlux<AnomalyIncident> incidentsFlux
* = metricsAdvisorAsyncClient.listIncidentsForDetectionConfig(detectionConfigurationId, startTime,
* endTime, options);
*
* incidentsFlux.subscribe(incident -> {
* System.out.printf("Data Feed Metric Id: %s%n", incident.getMetricId());
* System.out.printf("Detection Configuration Id: %s%n", incident.getDetectionConfigurationId());
* System.out.printf("Anomaly Incident Id: %s%n", incident.getId());
* System.out.printf("Anomaly Incident Start Time: %s%n", incident.getStartTime());
* System.out.printf("Anomaly Incident AnomalySeverity: %s%n", incident.getSeverity());
* System.out.printf("Anomaly Incident Status: %s%n", incident.getStatus());
* System.out.printf("Root DataFeedDimension Key: %s%n", incident.getRootDimensionKey().asMap());
* });
*
*
*
* @param detectionConfigurationId The anomaly detection configuration id.
* @param startTime The start time of the time range within which the incidents were detected.
* @param endTime The end time of the time range within which the incidents were detected.
* @param options The additional parameters.
* @return The incidents.
* @throws IllegalArgumentException thrown if {@code detectionConfigurationId} does not conform
* to the UUID format specification.
* @throws NullPointerException thrown if the {@code detectionConfigurationId} or {@code options}
* or {@code startTime} or {@code endTime} is null.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedFlux<AnomalyIncident> listIncidentsForDetectionConfig(String detectionConfigurationId,
OffsetDateTime startTime, OffsetDateTime endTime, ListIncidentsDetectedOptions options) {
try {
return new PagedFlux<>(
() -> withContext(context -> listIncidentsForDetectionConfigSinglePageAsync(detectionConfigurationId,
startTime, endTime, options, context)),
continuationToken -> withContext(
context -> listIncidentsForDetectionConfigNextPageAsync(continuationToken, context)));
} catch (RuntimeException ex) {
return new PagedFlux<>(() -> FluxUtil.monoError(logger, ex));
}
}
private Mono<PagedResponse<AnomalyIncident>> listIncidentsForDetectionConfigSinglePageAsync(
String detectionConfigurationId, OffsetDateTime startTime, OffsetDateTime endTime,
ListIncidentsDetectedOptions options, Context context) {
validateIncidentsForDetectionConfigInputs(detectionConfigurationId, startTime, endTime);
DetectionIncidentResultQuery query
= new DetectionIncidentResultQuery().setStartTime(startTime).setEndTime(endTime);
options = getListIncidentsDetectedOptions(options, query);
return service
.getIncidentsByAnomalyDetectionConfigurationSinglePageAsync(UUID.fromString(detectionConfigurationId),
query, options.getMaxPageSize(), context)
.doOnRequest(ignoredValue -> logger.info("Listing incidents detected"))
.doOnSuccess(response -> logger.info("Listed incidents {}", response))
.doOnError(error -> logger.warning("Failed to list the incidents detected", error))
.map(IncidentTransforms::fromInnerPagedResponse);
}
private Mono<PagedResponse<AnomalyIncident>> listIncidentsForDetectionConfigNextPageAsync(String nextPageLink,
Context context) {
if (CoreUtils.isNullOrEmpty(nextPageLink)) {
return Mono.empty();
}
return service.getIncidentsByAnomalyDetectionConfigurationNextSinglePageAsync(nextPageLink, context)
.doOnSubscribe(ignoredValue -> logger.info("Retrieving the next listing page - Page {}", nextPageLink))
.doOnSuccess(
response -> logger.info("Retrieved the next listing page - Page {} {}", nextPageLink, response))
.doOnError(
error -> logger.warning("Failed to retrieve the next listing page - Page {}", nextPageLink, error))
.map(IncidentTransforms::fromInnerPagedResponse);
}
/**
* List the root causes for an incident.
*
* Code sample
*
*
* final String detectionConfigurationId = "c0dddf2539f-b804-4ab9-a70f-0da0c89c76d8";
* final String incidentId = "c5thh0f2539f-b804-4ab9-a70f-0da0c89c456d";
*
* metricsAdvisorAsyncClient.listIncidentRootCauses(detectionConfigurationId, incidentId)
* .subscribe(incidentRootCause -> {
* System.out.printf("Description: %s%n", incidentRootCause.getDescription());
* System.out.println("Series Key:");
* System.out.println(incidentRootCause.getSeriesKey().asMap());
* System.out.printf("Confidence for the detected incident root cause: %.2f%n",
* incidentRootCause.getContributionScore());
* });
*
*
*
*
* @param detectionConfigurationId anomaly detection configuration unique id.
* @param incidentId the incident for which you want to query root causes.
*
* @return the list of root causes for that incident.
* @throws IllegalArgumentException thrown if {@code detectionConfigurationId} fails the UUID format validation.
* @throws MetricsAdvisorResponseException thrown if the request is rejected by server.
* @throws NullPointerException thrown if the {@code detectionConfigurationId} or {@code incidentId} is null.
**/
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedFlux<IncidentRootCause> listIncidentRootCauses(String detectionConfigurationId, String incidentId) {
AnomalyIncident anomalyIncident = new AnomalyIncident();
IncidentHelper.setId(anomalyIncident, incidentId);
IncidentHelper.setDetectionConfigurationId(anomalyIncident, detectionConfigurationId);
try {
return new PagedFlux<>(
() -> withContext(context -> listIncidentRootCausesInternal(anomalyIncident, context)), null);
} catch (RuntimeException ex) {
return new PagedFlux<>(() -> monoError(logger, ex));
}
}
/**
* List the root causes for an anomalyIncident.
*
* Code sample
*
*
* final String detectionConfigurationId = "c0f2539f-b804-4ab9-a70f-0da0c89c76d8";
* final OffsetDateTime startTime = OffsetDateTime.parse("2020-01-01T00:00:00Z");
* final OffsetDateTime endTime = OffsetDateTime.parse("2020-09-09T00:00:00Z");
* final ListIncidentsDetectedOptions options
* = new ListIncidentsDetectedOptions()
* .setMaxPageSize(10);
*
* metricsAdvisorAsyncClient.listIncidentsForDetectionConfig(detectionConfigurationId, startTime, endTime,
* options)
* .flatMap(incident -> {
* return metricsAdvisorAsyncClient.listIncidentRootCauses(incident);
* })
* .subscribe(incidentRootCause -> {
* System.out.printf("Description: %s%n", incidentRootCause.getDescription());
* System.out.printf("Series Key:");
* System.out.println(incidentRootCause.getSeriesKey().asMap());
* });
*
*
*
*
* @param anomalyIncident the anomalyIncident for which you want to query root causes.
*
* @return the list of root causes for that anomalyIncident.
* @throws IllegalArgumentException thrown if {@code detectionConfigurationId} fails the UUID format validation.
* @throws MetricsAdvisorResponseException thrown if the request is rejected by server.
* @throws NullPointerException thrown if the {@code detectionConfigurationId} or {@code incidentId} is null.
**/
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedFlux<IncidentRootCause> listIncidentRootCauses(AnomalyIncident anomalyIncident) {
try {
return new PagedFlux<>(
() -> withContext(context -> listIncidentRootCausesInternal(anomalyIncident, context)), null);
} catch (RuntimeException ex) {
return new PagedFlux<>(() -> monoError(logger, ex));
}
}
private Mono<PagedResponse<IncidentRootCause>> listIncidentRootCausesInternal(AnomalyIncident anomalyIncident,
Context context) {
validateAnomalyIncidentRootCausesInputs(anomalyIncident, logger);
return service
.getRootCauseOfIncidentByAnomalyDetectionConfigurationWithResponseAsync(
UUID.fromString(anomalyIncident.getDetectionConfigurationId()), anomalyIncident.getId(), context)
.doOnSubscribe(ignoredValue -> logger.info("Retrieved the IncidentRootCauses - {}",
anomalyIncident.getDetectionConfigurationId()))
.doOnSuccess(response -> logger.info("Retrieved the IncidentRootCauses - {}", response))
.doOnError(error -> logger.warning("Failed to retrieve the incident root causes - {}",
anomalyIncident.getDetectionConfigurationId(), error))
.map(IncidentRootCauseTransforms::fromInnerResponse);
}
/**
* Fetch dimension values that have anomalies.
*
* Code sample
*
*
* final String detectionConfigurationId = "c0f2539f-b804-4ab9-a70f-0da0c89c76d8";
* final String dimensionName = "Dim1";
* final OffsetDateTime startTime = OffsetDateTime.parse("2020-01-01T00:00:00Z");
* final OffsetDateTime endTime = OffsetDateTime.parse("2020-09-09T00:00:00Z");
*
* metricsAdvisorAsyncClient.listAnomalyDimensionValues(detectionConfigurationId,
* dimensionName,
* startTime, endTime)
* .subscribe(dimensionValue -> {
* System.out.printf("DataFeedDimension Value: %s%n", dimensionValue);
* });
*
*
*
* @param detectionConfigurationId Identifies the configuration used to detect the anomalies.
* @param dimensionName The dimension name to retrieve the values for.
* @param startTime The start time of the time range within which the anomalies were identified.
* @param endTime The end time of the time range within which the anomalies were identified.
* @return The dimension values with anomalies.
* @throws IllegalArgumentException thrown if {@code detectionConfigurationId} does not conform
* to the UUID format specification.
* @throws NullPointerException thrown if the {@code detectionConfigurationId} or {@code dimensionName}
* or {@code startTime} or {@code endTime} is null.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedFlux<String> listAnomalyDimensionValues(String detectionConfigurationId, String dimensionName,
OffsetDateTime startTime, OffsetDateTime endTime) {
return listAnomalyDimensionValues(detectionConfigurationId, dimensionName, startTime, endTime, null);
}
/**
* Fetch dimension values that have anomalies.
*
* Code sample
*
*
* final String detectionConfigurationId = "c0f2539f-b804-4ab9-a70f-0da0c89c76d8";
* final String dimensionName = "Dim1";
* final OffsetDateTime startTime = OffsetDateTime.parse("2020-01-01T00:00:00Z");
* final OffsetDateTime endTime = OffsetDateTime.parse("2020-09-09T00:00:00Z");
* final ListAnomalyDimensionValuesOptions options
* = new ListAnomalyDimensionValuesOptions()
* .setMaxPageSize(10);
*
* metricsAdvisorAsyncClient.listAnomalyDimensionValues(detectionConfigurationId,
* dimensionName,
* startTime, endTime, options)
* .subscribe(dimensionValue -> {
* System.out.printf("DataFeedDimension Value: %s%n", dimensionValue);
* });
*
*
*
* @param detectionConfigurationId Identifies the configuration used to detect the anomalies.
* @param dimensionName The dimension name to retrieve the values for.
* @param startTime The start time of the time range within which the anomalies were identified.
* @param endTime The end time of the time range within which the anomalies were identified.
* @param options The additional parameters.
* @return The dimension values with anomalies.
* @throws IllegalArgumentException thrown if {@code detectionConfigurationId} does not conform
* to the UUID format specification.
* @throws NullPointerException thrown if the {@code detectionConfigurationId} or {@code dimensionName}
* or {@code options} or {@code startTime} or {@code endTime} is null.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedFlux<String> listAnomalyDimensionValues(String detectionConfigurationId, String dimensionName,
OffsetDateTime startTime, OffsetDateTime endTime, ListAnomalyDimensionValuesOptions options) {
try {
return new PagedFlux<>(
() -> withContext(context -> listAnomalyDimensionValuesSinglePageAsync(detectionConfigurationId,
dimensionName, startTime, endTime, options, context)),
continuationToken -> withContext(context -> listAnomalyDimensionValuesNextPageAsync(continuationToken,
dimensionName, startTime, endTime, options, context)));
} catch (RuntimeException ex) {
return new PagedFlux<>(() -> FluxUtil.monoError(logger, ex));
}
}
private Mono<PagedResponse<String>> listAnomalyDimensionValuesSinglePageAsync(String detectionConfigurationId,
String dimensionName, OffsetDateTime startTime, OffsetDateTime endTime,
ListAnomalyDimensionValuesOptions options, Context context) {
validateAnomalyDimensionValuesInputs(detectionConfigurationId, dimensionName, startTime, endTime);
AnomalyDimensionQuery query = new AnomalyDimensionQuery();
query.setDimensionName(dimensionName);
query.setStartTime(startTime);
query.setEndTime(endTime);
options = getListAnomalyDimensionValuesOptions(options, query);
return service
.getDimensionOfAnomaliesByAnomalyDetectionConfigurationSinglePageAsync(
UUID.fromString(detectionConfigurationId), query, options.getSkip(), options.getMaxPageSize(), context)
.doOnRequest(ignoredValue -> logger.info("Listing dimension values with anomalies"))
.doOnSuccess(response -> logger.info("Listed dimension values with anomalies {}", response))
.doOnError(error -> logger.warning("Failed to list the dimension values with anomalies", error));
}
private Mono<PagedResponse<String>> listAnomalyDimensionValuesNextPageAsync(String nextPageLink,
String dimensionName, OffsetDateTime startTime, OffsetDateTime endTime,
ListAnomalyDimensionValuesOptions options, Context context) {
if (CoreUtils.isNullOrEmpty(nextPageLink)) {
return Mono.empty();
}
AnomalyDimensionQuery query = new AnomalyDimensionQuery();
query.setDimensionName(dimensionName);
query.setStartTime(startTime);
query.setEndTime(endTime);
getListAnomalyDimensionValuesOptions(options, query);
return service
.getDimensionOfAnomaliesByAnomalyDetectionConfigurationNextSinglePageAsync(nextPageLink, query, context)
.doOnSubscribe(ignoredValue -> logger.info("Retrieving the next listing page - Page {}", nextPageLink))
.doOnSuccess(response -> logger.info("Retrieved the next listing page - Page {}", nextPageLink))
.doOnError(
error -> logger.warning("Failed to retrieve the next listing page - Page {}", nextPageLink, error));
}
/**
* Fetch the alerts triggered by an anomaly alert configuration.
*
* Code sample
*
*
* final String alertConfigurationId = "ff3014a0-bbbb-41ec-a637-677e77b81299";
* final OffsetDateTime startTime = OffsetDateTime.parse("2020-01-01T00:00:00Z");
* final OffsetDateTime endTime = OffsetDateTime.parse("2020-09-09T00:00:00Z");
* final AlertQueryTimeMode timeMode = AlertQueryTimeMode.ANOMALY_TIME;
*
* metricsAdvisorAsyncClient.listAlerts(alertConfigurationId, startTime, endTime)
* .subscribe(alert -> {
* System.out.printf("Anomaly Alert Id: %s%n", alert.getId());
* System.out.printf("Created Time: %s%n", alert.getCreatedTime());
* System.out.printf("Modified Time: %s%n", alert.getModifiedTime());
* });
*
*
*
* @param alertConfigurationId The anomaly alert configuration id.
* @param startTime The start time of the time range within which the alerts were triggered.
* @param endTime The end time of the time range within which the alerts were triggered.
* @return The alerts.
* @throws IllegalArgumentException thrown if {@code alertConfigurationId} does not conform
* to the UUID format specification.
* @throws NullPointerException thrown if the {@code alertConfigurationId}
* or {@code startTime} or {@code endTime} is null.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedFlux<AnomalyAlert> listAlerts(String alertConfigurationId, OffsetDateTime startTime,
OffsetDateTime endTime) {
return listAlerts(alertConfigurationId, startTime, endTime, null);
}
/**
* Fetch the alerts triggered by an anomaly alert configuration.
*
* Code sample
*
*
* final String alertConfigurationId = "ff3014a0-bbbb-41ec-a637-677e77b81299";
* final OffsetDateTime startTime = OffsetDateTime.parse("2020-01-01T00:00:00Z");
* final OffsetDateTime endTime = OffsetDateTime.parse("2020-09-09T00:00:00Z");
* final AlertQueryTimeMode timeMode = AlertQueryTimeMode.ANOMALY_TIME;
* final ListAlertOptions options = new ListAlertOptions()
* .setAlertQueryTimeMode(timeMode)
* .setMaxPageSize(10);
*
* metricsAdvisorAsyncClient.listAlerts(alertConfigurationId, startTime, endTime, options)
* .subscribe(alert -> {
* System.out.printf("Anomaly Alert Id: %s%n", alert.getId());
* System.out.printf("Created Time: %s%n", alert.getCreatedTime());
* System.out.printf("Modified Time: %s%n", alert.getModifiedTime());
* });
*
*
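* Like the other listing methods, the returned alerts can also be consumed page by page through
* {@code byPage()}; a minimal sketch, assuming the same alert configuration and time range as above:
*
* metricsAdvisorAsyncClient.listAlerts(alertConfigurationId, startTime, endTime, options)
*     .byPage()
*     .subscribe(pagedResponse ->
*         System.out.printf("Alerts in page: %d%n", pagedResponse.getValue().size()));
*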
*
* @param alertConfigurationId The anomaly alert configuration id.
* @param startTime The start time of the time range within which the alerts were triggered.
* @param endTime The end time of the time range within which the alerts were triggered.
* @param options The additional parameters.
* @return The alerts.
* @throws IllegalArgumentException thrown if {@code alertConfigurationId} does not conform
* to the UUID format specification.
* @throws NullPointerException thrown if the {@code alertConfigurationId}
* or {@code startTime} or {@code endTime} is null.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
    public PagedFlux<AnomalyAlert> listAlerts(String alertConfigurationId, OffsetDateTime startTime,
OffsetDateTime endTime, ListAlertOptions options) {
try {
return new PagedFlux<>(
() -> withContext(
context -> listAlertsSinglePageAsync(alertConfigurationId, startTime, endTime, options, context)),
continuationToken -> withContext(
context -> listAlertsNextPageAsync(continuationToken, startTime, endTime, options, context)));
} catch (RuntimeException ex) {
return new PagedFlux<>(() -> FluxUtil.monoError(logger, ex));
}
}
    private Mono<PagedResponse<AnomalyAlert>> listAlertsSinglePageAsync(String alertConfigurationId,
OffsetDateTime startTime, OffsetDateTime endTime, ListAlertOptions options, Context context) {
validateListAlertsInputs(alertConfigurationId, startTime, endTime);
if (options == null) {
options = new ListAlertOptions();
}
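        // Translate the public AlertQueryTimeMode option into the service-side TimeMode for the query body.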
AlertingResultQuery query = new AlertingResultQuery();
query.setStartTime(startTime);
query.setEndTime(endTime);
query.setTimeMode(TimeMode.fromString(toStringOrNull(options.getTimeMode())));
return service
.getAlertsByAnomalyAlertingConfigurationSinglePageAsync(UUID.fromString(alertConfigurationId), query,
options.getSkip(), options.getMaxPageSize(), context)
.doOnRequest(ignoredValue -> logger.info("Listing alerts"))
.doOnSuccess(response -> logger.info("Listed alerts {}", response))
.doOnError(error -> logger.warning("Failed to list the alerts", error));
}
    private Mono<PagedResponse<AnomalyAlert>> listAlertsNextPageAsync(String nextPageLink, OffsetDateTime startTime,
OffsetDateTime endTime, ListAlertOptions options, Context context) {
if (CoreUtils.isNullOrEmpty(nextPageLink)) {
return Mono.empty();
}
        if (options == null) {
            options = new ListAlertOptions();
        }
        AlertingResultQuery query = new AlertingResultQuery();
        query.setStartTime(startTime);
        query.setEndTime(endTime);
        query.setTimeMode(TimeMode.fromString(toStringOrNull(options.getTimeMode())));
return service.getAlertsByAnomalyAlertingConfigurationNextSinglePageAsync(nextPageLink, query, context)
.doOnSubscribe(ignoredValue -> logger.info("Retrieving the next listing page - Page {}", nextPageLink))
.doOnSuccess(response -> logger.info("Retrieved the next listing page - Page {}", nextPageLink))
.doOnError(
error -> logger.warning("Failed to retrieve the next listing page - Page {}", nextPageLink, error));
}
/**
* Fetch the anomalies in an alert.
*
* Code sample
*
*
* final String alertConfigurationId = "ff3014a0-bbbb-41ec-a637-677e77b81299";
* final String alertId = "1746b031c00";
*
* metricsAdvisorAsyncClient.listAnomaliesForAlert(
* alertConfigurationId,
* alertId)
* .subscribe(anomaly -> {
* System.out.printf("Data Feed Metric Id: %s%n", anomaly.getMetricId());
* System.out.printf("Detection Configuration Id: %s%n", anomaly.getDetectionConfigurationId());
* System.out.printf("DataPoint Anomaly Created Time: %s%n", anomaly.getCreatedTime());
* System.out.printf("DataPoint Anomaly Modified Time: %s%n", anomaly.getModifiedTime());
* System.out.printf("DataPoint Anomaly AnomalySeverity: %s%n", anomaly.getSeverity());
* System.out.printf("DataPoint Anomaly Status: %s%n", anomaly.getStatus());
* System.out.printf("Series Key:");
* DimensionKey seriesKey = anomaly.getSeriesKey();
* for (Map.Entry<String, String> dimension : seriesKey.asMap().entrySet()) {
* System.out.printf("DimensionName: %s DimensionValue:%s%n",
* dimension.getKey(), dimension.getValue());
* }
* });
*
*
*
* @param alertConfigurationId The anomaly alert configuration id.
* @param alertId The alert id.
*
* @return The anomalies.
* @throws IllegalArgumentException thrown if {@code alertConfigurationId} or {@code alertId} does not
* conform to the UUID format specification.
* @throws NullPointerException thrown if the {@code alertConfigurationId} or {@code alertId} is null.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
    public PagedFlux<DataPointAnomaly> listAnomaliesForAlert(String alertConfigurationId, String alertId) {
return listAnomaliesForAlert(alertConfigurationId, alertId, null);
}
/**
* Fetch the anomalies in an alert.
*
* Code sample
*
*
* final String alertConfigurationId = "ff3014a0-bbbb-41ec-a637-677e77b81299";
* final String alertId = "1746b031c00";
* final ListAnomaliesAlertedOptions options = new ListAnomaliesAlertedOptions()
* .setMaxPageSize(10);
* metricsAdvisorAsyncClient.listAnomaliesForAlert(
* alertConfigurationId,
* alertId,
* options)
* .subscribe(anomaly -> {
* System.out.printf("Data Feed Metric Id: %s%n", anomaly.getMetricId());
* System.out.printf("Detection Configuration Id: %s%n", anomaly.getDetectionConfigurationId());
* System.out.printf("DataPoint Anomaly Created Time: %s%n", anomaly.getCreatedTime());
* System.out.printf("DataPoint Anomaly Modified Time: %s%n", anomaly.getModifiedTime());
* System.out.printf("DataPoint Anomaly AnomalySeverity: %s%n", anomaly.getSeverity());
* System.out.printf("DataPoint Anomaly Status: %s%n", anomaly.getStatus());
* System.out.printf("Series Key:");
* System.out.println(anomaly.getSeriesKey().asMap());
* });
*
*
*
* @param alertConfigurationId The anomaly alert configuration id.
* @param alertId The alert id.
* @param options The additional parameters.
*
* @return The anomalies.
* @throws IllegalArgumentException thrown if {@code alertConfigurationId} or {@code alertId} does not
* conform to the UUID format specification.
* @throws NullPointerException thrown if the {@code alertConfigurationId} or {@code alertId} is null.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
    public PagedFlux<DataPointAnomaly> listAnomaliesForAlert(String alertConfigurationId, String alertId,
ListAnomaliesAlertedOptions options) {
try {
return new PagedFlux<>(
() -> withContext(
context -> listAnomaliesForAlertSinglePageAsync(alertConfigurationId, alertId, options, context)),
continuationToken -> withContext(
context -> listAnomaliesForAlertNextPageAsync(continuationToken, context)));
} catch (RuntimeException ex) {
return new PagedFlux<>(() -> FluxUtil.monoError(logger, ex));
}
}
    private Mono<PagedResponse<DataPointAnomaly>> listAnomaliesForAlertSinglePageAsync(String alertConfigurationId,
String alertId, ListAnomaliesAlertedOptions options, Context context) {
validateListAnomaliesInputs(alertConfigurationId, alertId);
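        // The service returns implementation models; AnomalyTransforms maps each page to the public DataPointAnomaly type.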
return service
.getAnomaliesFromAlertByAnomalyAlertingConfigurationSinglePageAsync(UUID.fromString(alertConfigurationId),
alertId, options == null ? null : options.getSkip(), options == null ? null : options.getMaxPageSize(),
context)
.doOnRequest(ignoredValue -> logger.info("Listing anomalies for alert"))
.doOnSuccess(response -> logger.info("Listed anomalies {}", response))
.doOnError(error -> logger.warning("Failed to list the anomalies for alert", error))
.map(AnomalyTransforms::fromInnerPagedResponse);
}
    private Mono<PagedResponse<DataPointAnomaly>> listAnomaliesForAlertNextPageAsync(String nextPageLink,
Context context) {
if (CoreUtils.isNullOrEmpty(nextPageLink)) {
return Mono.empty();
}
return service.getAnomaliesFromAlertByAnomalyAlertingConfigurationNextSinglePageAsync(nextPageLink, context)
.doOnSubscribe(ignoredValue -> logger.info("Retrieving the next listing page - Page {}", nextPageLink))
.doOnSuccess(
response -> logger.info("Retrieved the next listing page - Page {} {}", nextPageLink, response))
.doOnError(
error -> logger.warning("Failed to retrieve the next listing page - Page {}", nextPageLink, error))
.map(AnomalyTransforms::fromInnerPagedResponse);
}
/**
* Fetch the incidents in an alert.
*
* Code sample
*
*
* final String alertConfigurationId = "ff3014a0-bbbb-41ec-a637-677e77b81299";
* final String alertId = "1746b031c00";
* final ListIncidentsAlertedOptions options = new ListIncidentsAlertedOptions()
* .setMaxPageSize(10);
*
* metricsAdvisorAsyncClient.listIncidentsForAlert(
* alertConfigurationId,
* alertId,
* options)
* .subscribe(incident -> {
* System.out.printf("Data Feed Metric Id: %s%n", incident.getMetricId());
* System.out.printf("Detection Configuration Id: %s%n", incident.getDetectionConfigurationId());
* System.out.printf("Anomaly Incident Id: %s%n", incident.getId());
* System.out.printf("Anomaly Incident Start Time: %s%n", incident.getStartTime());
* System.out.printf("Anomaly Incident AnomalySeverity: %s%n", incident.getSeverity());
* System.out.printf("Anomaly Incident Status: %s%n", incident.getStatus());
* System.out.printf("Root DataFeedDimension Key:");
* DimensionKey rootDimension = incident.getRootDimensionKey();
* for (Map.Entry<String, String> dimension : rootDimension.asMap().entrySet()) {
* System.out.printf("DimensionName: %s DimensionValue:%s%n",
* dimension.getKey(), dimension.getValue());
* }
* });
*
*
*
* @param alertConfigurationId The anomaly alert configuration id.
* @param alertId The alert id.
* @return The incidents.
* @throws IllegalArgumentException thrown if {@code alertConfigurationId} or {@code alertId} does not
* conform to the UUID format specification.
* @throws NullPointerException thrown if the {@code alertConfigurationId} or {@code alertId} is null.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
    public PagedFlux<AnomalyIncident> listIncidentsForAlert(String alertConfigurationId, String alertId) {
return listIncidentsForAlert(alertConfigurationId, alertId, null);
}
/**
* Fetch the incidents in an alert.
*
* Code sample
*
*
* final String alertConfigurationId = "ff3014a0-bbbb-41ec-a637-677e77b81299";
* final String alertId = "1746b031c00";
* final ListIncidentsAlertedOptions options = new ListIncidentsAlertedOptions()
* .setMaxPageSize(10);
*
* metricsAdvisorAsyncClient.listIncidentsForAlert(
* alertConfigurationId,
* alertId,
* options)
* .subscribe(incident -> {
* System.out.printf("Data Feed Metric Id: %s%n", incident.getMetricId());
* System.out.printf("Detection Configuration Id: %s%n", incident.getDetectionConfigurationId());
* System.out.printf("Anomaly Incident Id: %s%n", incident.getId());
* System.out.printf("Anomaly Incident Start Time: %s%n", incident.getStartTime());
* System.out.printf("Anomaly Incident AnomalySeverity: %s%n", incident.getSeverity());
* System.out.printf("Anomaly Incident Status: %s%n", incident.getStatus());
* System.out.printf("Root DataFeedDimension Key:");
* DimensionKey rootDimension = incident.getRootDimensionKey();
* for (Map.Entry<String, String> dimension : rootDimension.asMap().entrySet()) {
* System.out.printf("DimensionName: %s DimensionValue:%s%n",
* dimension.getKey(), dimension.getValue());
* }
* });
*
*
*
* @param alertConfigurationId The anomaly alert configuration id.
* @param alertId The alert id.
* @param options The additional parameters.
* @return The incidents.
* @throws IllegalArgumentException thrown if {@code alertConfigurationId} or {@code alertId} does not
* conform to the UUID format specification.
* @throws NullPointerException thrown if the {@code alertConfigurationId} or {@code alertId} is null.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
    public PagedFlux<AnomalyIncident> listIncidentsForAlert(String alertConfigurationId, String alertId,
ListIncidentsAlertedOptions options) {
try {
return new PagedFlux<>(
() -> withContext(
context -> listIncidentsForAlertSinglePageAsync(alertConfigurationId, alertId, options, context)),
continuationToken -> withContext(
context -> listIncidentsForAlertNextPageAsync(continuationToken, context)));
} catch (RuntimeException ex) {
return new PagedFlux<>(() -> FluxUtil.monoError(logger, ex));
}
}
    private Mono<PagedResponse<AnomalyIncident>> listIncidentsForAlertSinglePageAsync(String alertConfigurationId,
String alertId, ListIncidentsAlertedOptions options, Context context) {
validateListAnomaliesInputs(alertConfigurationId, alertId);
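        // The service returns implementation models; IncidentTransforms maps each page to the public AnomalyIncident type.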
return service
.getIncidentsFromAlertByAnomalyAlertingConfigurationSinglePageAsync(UUID.fromString(alertConfigurationId),
alertId, options == null ? null : options.getSkip(), options == null ? null : options.getMaxPageSize(),
context)
.doOnRequest(ignoredValue -> logger.info("Listing incidents for alert"))
.doOnSuccess(response -> logger.info("Listed incidents {}", response))
.doOnError(error -> logger.warning("Failed to list the incidents for alert", error))
.map(IncidentTransforms::fromInnerPagedResponse);
}
    private Mono<PagedResponse<AnomalyIncident>> listIncidentsForAlertNextPageAsync(String nextPageLink,
Context context) {
if (CoreUtils.isNullOrEmpty(nextPageLink)) {
return Mono.empty();
}
return service.getIncidentsFromAlertByAnomalyAlertingConfigurationNextSinglePageAsync(nextPageLink, context)
.doOnSubscribe(ignoredValue -> logger.info("Retrieving the next listing page - Page {}", nextPageLink))
.doOnSuccess(response -> logger.info("Retrieved the next listing page - Page {}", nextPageLink))
.doOnError(
error -> logger.warning("Failed to retrieve the next listing page - Page {}", nextPageLink, error))
.map(IncidentTransforms::fromInnerPagedResponse);
}
/**
* Create a new metric feedback.
*
* Code sample
*
*
* final String metricId = "d3gh4i4-b804-4ab9-a70f-0da0c89cft3l";
* final OffsetDateTime startTime = OffsetDateTime.parse("2020-01-01T00:00:00Z");
* final OffsetDateTime endTime = OffsetDateTime.parse("2020-09-09T00:00:00Z");
* final MetricChangePointFeedback metricChangePointFeedback
* = new MetricChangePointFeedback(startTime, endTime, ChangePointValue.AUTO_DETECT);
*
* metricsAdvisorAsyncClient.addFeedback(metricId, metricChangePointFeedback)
* .subscribe(metricFeedback -> {
* MetricChangePointFeedback createdMetricChangePointFeedback = (MetricChangePointFeedback) metricFeedback;
* System.out.printf("Data Feed Metric feedback Id: %s%n", createdMetricChangePointFeedback.getId());
* System.out.printf("Data Feed Metric feedback change point value: %s%n",
* createdMetricChangePointFeedback.getChangePointValue().toString());
* System.out.printf("Data Feed Metric feedback start time: %s%n",
* createdMetricChangePointFeedback.getStartTime());
* System.out.printf("Data Feed Metric feedback end time: %s%n",
* createdMetricChangePointFeedback.getEndTime());
* });
*
*
*
* @param metricId the unique id for which the feedback needs to be submitted.
* @param metricFeedback the actual metric feedback.
*
* @return A {@link Mono} containing the created {@link MetricFeedback metric feedback}.
     * @throws NullPointerException If {@code metricId} or {@code metricFeedback.dimensionFilter} is null.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<MetricFeedback> addFeedback(String metricId, MetricFeedback metricFeedback) {
return addFeedbackWithResponse(metricId, metricFeedback).flatMap(FluxUtil::toMono);
}
/**
* Create a new metric feedback.
*
* Code sample
*
*
* final String metricId = "d3gh4i4-b804-4ab9-a70f-0da0c89cft3l";
* final OffsetDateTime startTime = OffsetDateTime.parse("2020-01-01T00:00:00Z");
* final OffsetDateTime endTime = OffsetDateTime.parse("2020-09-09T00:00:00Z");
* final MetricChangePointFeedback metricChangePointFeedback
* = new MetricChangePointFeedback(startTime, endTime, ChangePointValue.AUTO_DETECT);
*
* metricsAdvisorAsyncClient.addFeedbackWithResponse(metricId, metricChangePointFeedback)
* .subscribe(metricFeedbackResponse -> {
* System.out.printf("Data Feed Metric feedback creation operation status %s%n",
* metricFeedbackResponse.getStatusCode());
* MetricChangePointFeedback createdMetricChangePointFeedback
* = (MetricChangePointFeedback) metricFeedbackResponse.getValue();
* System.out.printf("Data Feed Metric feedback Id: %s%n", createdMetricChangePointFeedback.getId());
* System.out.printf("Data Feed Metric feedback change point value: %s%n",
* createdMetricChangePointFeedback.getChangePointValue().toString());
* System.out.printf("Data Feed Metric feedback start time: %s%n",
* createdMetricChangePointFeedback.getStartTime());
* System.out.printf("Data Feed Metric feedback end time: %s%n",
* createdMetricChangePointFeedback.getEndTime());
* System.out.printf("Data Feed Metric feedback associated dimension filter: %s%n",
* createdMetricChangePointFeedback.getDimensionFilter().asMap());
* });
*
*
*
* @param metricId the unique id for which the feedback needs to be submitted.
* @param metricFeedback the actual metric feedback.
*
     * @return A {@link Mono} containing a {@link Response} whose value is the created {@link MetricFeedback metric feedback}.
     * @throws NullPointerException If {@code metricId}, {@code metricFeedback},
     * or {@code metricFeedback.dimensionFilter} is null.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<Response<MetricFeedback>> addFeedbackWithResponse(String metricId, MetricFeedback metricFeedback) {
try {
return withContext(context -> addFeedbackWithResponse(metricId, metricFeedback, context));
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
    private Mono<Response<MetricFeedback>> addFeedbackWithResponse(String metricId, MetricFeedback metricFeedback,
Context context) {
Objects.requireNonNull(metricId, "'metricId' is required.");
Objects.requireNonNull(metricFeedback, "'metricFeedback' is required.");
Objects.requireNonNull(metricFeedback.getDimensionFilter(), "'metricFeedback.dimensionFilter' is required.");
com.azure.ai.metricsadvisor.implementation.models.MetricFeedback innerMetricFeedback;
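        // Map the public MetricFeedback subtype to its corresponding implementation model before calling the service.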
if (metricFeedback instanceof MetricAnomalyFeedback) {
MetricAnomalyFeedback metricAnomalyFeedback = (MetricAnomalyFeedback) metricFeedback;
Objects.requireNonNull(metricAnomalyFeedback.getStartTime(), "'metricFeedback.startTime' is required.");
Objects.requireNonNull(metricAnomalyFeedback.getEndTime(), "'metricFeedback.endTime' is required.");
Objects.requireNonNull(metricAnomalyFeedback.getAnomalyValue(),
"'metricFeedback.anomalyValue' is required.");
AnomalyFeedback innerAnomalyFeedback
= new AnomalyFeedback().setStartTime(metricAnomalyFeedback.getStartTime())
.setEndTime(metricAnomalyFeedback.getEndTime())
.setValue(new AnomalyFeedbackValue().setAnomalyValue(metricAnomalyFeedback.getAnomalyValue()));
if (metricAnomalyFeedback.getDetectionConfiguration() != null) {
innerAnomalyFeedback
.setAnomalyDetectionConfigurationId(
UUID.fromString(metricAnomalyFeedback.getDetectionConfiguration().getId()))
.setAnomalyDetectionConfigurationSnapshot(DetectionConfigurationTransforms.toInnerForCreate(logger,
metricId, metricAnomalyFeedback.getDetectionConfiguration()));
}
innerMetricFeedback = innerAnomalyFeedback.setMetricId(UUID.fromString(metricId))
.setDimensionFilter(
new FeedbackDimensionFilter().setDimension(metricAnomalyFeedback.getDimensionFilter().asMap()));
} else if (metricFeedback instanceof MetricChangePointFeedback) {
MetricChangePointFeedback metricChangePointFeedback = (MetricChangePointFeedback) metricFeedback;
Objects.requireNonNull(metricChangePointFeedback.getStartTime(), "'metricFeedback.startTime' is required.");
Objects.requireNonNull(metricChangePointFeedback.getEndTime(), "'metricFeedback.endTime' is required.");
Objects.requireNonNull(metricChangePointFeedback.getChangePointValue(),
"'metricFeedback.changePointValue' is required.");
innerMetricFeedback = new ChangePointFeedback().setStartTime(metricChangePointFeedback.getStartTime())
.setEndTime(metricChangePointFeedback.getEndTime())
.setValue(
new ChangePointFeedbackValue().setChangePointValue(metricChangePointFeedback.getChangePointValue()))
.setMetricId(UUID.fromString(metricId))
.setDimensionFilter(
new FeedbackDimensionFilter().setDimension(metricChangePointFeedback.getDimensionFilter().asMap()));
} else if (metricFeedback instanceof MetricPeriodFeedback) {
MetricPeriodFeedback metricPeriodFeedback = (MetricPeriodFeedback) metricFeedback;
Objects.requireNonNull(metricPeriodFeedback.getPeriodType(), "'metricFeedback.periodType' is required.");
Objects.requireNonNull(metricPeriodFeedback.getPeriodValue(), "'metricFeedback.periodValue' is required.");
innerMetricFeedback = new PeriodFeedback()
.setValue(new PeriodFeedbackValue().setPeriodValue(metricPeriodFeedback.getPeriodValue())
.setPeriodType(metricPeriodFeedback.getPeriodType()))
.setMetricId(UUID.fromString(metricId))
.setDimensionFilter(
new FeedbackDimensionFilter().setDimension(metricPeriodFeedback.getDimensionFilter().asMap()));
} else if (metricFeedback instanceof MetricCommentFeedback) {
MetricCommentFeedback metricCommentFeedback = (MetricCommentFeedback) metricFeedback;
Objects.requireNonNull(metricCommentFeedback.getComment(), "'metricFeedback.comment' is required.");
innerMetricFeedback = new CommentFeedback().setStartTime(metricCommentFeedback.getStartTime())
.setEndTime(metricCommentFeedback.getEndTime())
.setValue(new CommentFeedbackValue().setCommentValue(metricCommentFeedback.getComment()))
.setMetricId(UUID.fromString(metricId))
.setDimensionFilter(
new FeedbackDimensionFilter().setDimension(metricCommentFeedback.getDimensionFilter().asMap()));
} else {
throw logger.logExceptionAsError(new IllegalArgumentException("Unknown feedback type."));
}
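        // The create call returns the new feedback's URI in the Location header; fetch the full resource so the
        // caller receives the populated feedback.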
return service.createMetricFeedbackWithResponseAsync(innerMetricFeedback, context)
.flatMap(createdMetricFeedbackResponse -> getFeedbackWithResponse(
parseOperationId(createdMetricFeedbackResponse.getDeserializedHeaders().getLocation())));
}
/**
* Get a metric feedback by its id.
*
* Code sample
*
*
*
* final String feedbackId = "8i3h4i4-b804-4ab9-a70f-0da0c89cft3l";
* metricsAdvisorAsyncClient.getFeedback(feedbackId)
* .subscribe(metricFeedback -> {
* System.out.printf("Data Feed Metric feedback Id: %s%n", metricFeedback.getId());
* System.out.printf("Data Feed Metric feedback associated dimension filter: %s%n",
* metricFeedback.getDimensionFilter().asMap());
*
* if (PERIOD.equals(metricFeedback.getFeedbackType())) {
* MetricPeriodFeedback createMetricPeriodFeedback
* = (MetricPeriodFeedback) metricFeedback;
* System.out.printf("Data Feed Metric feedback type: %s%n",
* createMetricPeriodFeedback.getPeriodType().toString());
* System.out.printf("Data Feed Metric feedback period value: %d%n",
* createMetricPeriodFeedback.getPeriodValue());
* }
* });
*
*
*
* @param feedbackId The metric feedback unique id.
*
* @return The metric feedback for the provided id.
* @throws IllegalArgumentException If {@code feedbackId} does not conform to the UUID format specification.
* @throws NullPointerException thrown if the {@code feedbackId} is null.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<MetricFeedback> getFeedback(String feedbackId) {
return getFeedbackWithResponse(feedbackId).flatMap(FluxUtil::toMono);
}
/**
* Get a metric feedback by its id.
*
* Code sample
*
*
*
* final String feedbackId = "8i3h4i4-b804-4ab9-a70f-0da0c89cft3l";
* metricsAdvisorAsyncClient.getFeedbackWithResponse(feedbackId)
* .subscribe(metricFeedbackResponse -> {
* final MetricFeedback metricFeedback = metricFeedbackResponse.getValue();
* System.out.printf("Data Feed Metric feedback Id: %s%n", metricFeedback.getId());
* System.out.printf("Data Feed Metric feedback associated dimension filter: %s%n",
* metricFeedback.getDimensionFilter().asMap());
*
* if (PERIOD.equals(metricFeedback.getFeedbackType())) {
* MetricPeriodFeedback createMetricPeriodFeedback
* = (MetricPeriodFeedback) metricFeedback;
* System.out.printf("Data Feed Metric feedback type: %s%n",
* createMetricPeriodFeedback.getPeriodType().toString());
* System.out.printf("Data Feed Metric feedback period value: %d%n",
* createMetricPeriodFeedback.getPeriodValue());
* }
* });
*
*
*
* @param feedbackId The metric feedback unique id.
*
* @return The metric feedback for the provided id.
* @throws IllegalArgumentException If {@code feedbackId} does not conform to the UUID format specification.
* @throws NullPointerException thrown if the {@code feedbackId} is null.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<Response<MetricFeedback>> getFeedbackWithResponse(String feedbackId) {
try {
return withContext(context -> getFeedbackWithResponse(feedbackId, context));
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
    private Mono<Response<MetricFeedback>> getFeedbackWithResponse(String feedbackId, Context context) {
Objects.requireNonNull(feedbackId, "'feedbackId' is required.");
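        // Convert the inner feedback model into the public MetricFeedback type before returning the response.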
return service.getMetricFeedbackWithResponseAsync(UUID.fromString(feedbackId), context)
.map(metricFeedbackResponse -> new SimpleResponse<>(metricFeedbackResponse,
MetricFeedbackTransforms.fromInner(metricFeedbackResponse.getValue())));
}
/**
* List information of metrics feedback on the account for a metric Id.
*
* Code sample
*
*
* final String metricId = "d3gh4i4-b804-4ab9-a70f-0da0c89cft3l";
* metricsAdvisorAsyncClient.listFeedback(metricId)
* .subscribe(metricFeedback -> {
* System.out.printf("Data Feed Metric feedback Id: %s%n", metricFeedback.getId());
* System.out.printf("Data Feed Metric feedback associated dimension filter: %s%n",
* metricFeedback.getDimensionFilter().asMap());
*
* if (PERIOD.equals(metricFeedback.getFeedbackType())) {
* MetricPeriodFeedback periodFeedback
* = (MetricPeriodFeedback) metricFeedback;
* System.out.printf("Data Feed Metric feedback type: %s%n",
* periodFeedback.getPeriodType().toString());
* System.out.printf("Data Feed Metric feedback period value: %d%n",
* periodFeedback.getPeriodValue());
* } else if (ANOMALY.equals(metricFeedback.getFeedbackType())) {
* MetricAnomalyFeedback metricAnomalyFeedback
* = (MetricAnomalyFeedback) metricFeedback;
* System.out.printf("Data Feed Metric feedback anomaly value: %s%n",
* metricAnomalyFeedback.getAnomalyValue().toString());
* System.out.printf("Data Feed Metric feedback associated detection configuration: %s%n",
* metricAnomalyFeedback.getDetectionConfigurationId());
* } else if (COMMENT.equals(metricFeedback.getFeedbackType())) {
* MetricCommentFeedback metricCommentFeedback
* = (MetricCommentFeedback) metricFeedback;
* System.out.printf("Data Feed Metric feedback comment value: %s%n",
* metricCommentFeedback.getComment());
* }
* });
*
*
*
* @param metricId the unique metric Id.
*
* @return A {@link PagedFlux} containing information of all the {@link MetricFeedback metric feedbacks}
* in the account.
     * @throws IllegalArgumentException thrown if {@code metricId} fails the UUID format validation.
* @throws MetricsAdvisorResponseException thrown if the request is rejected by server.
* @throws NullPointerException thrown if the {@code metricId} is null.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
    public PagedFlux<MetricFeedback> listFeedback(String metricId) {
return listFeedback(metricId, null);
}
/**
* List information of all metric feedbacks on the metrics advisor account for a metric Id.
*
* Code sample
*
*
* final String metricId = "d3gh4i4-b804-4ab9-a70f-0da0c89cft3l";
* final OffsetDateTime startTime = OffsetDateTime.parse("2020-01-01T00:00:00Z");
* final OffsetDateTime endTime = OffsetDateTime.parse("2020-09-09T00:00:00Z");
*
* metricsAdvisorAsyncClient.listFeedback(metricId,
* new ListMetricFeedbackOptions()
* .setFilter(new ListMetricFeedbackFilter()
* .setStartTime(startTime)
* .setTimeMode(FeedbackQueryTimeMode.FEEDBACK_CREATED_TIME)
* .setEndTime(endTime)))
* .subscribe(metricFeedback -> {
* System.out.printf("Data Feed Metric feedback Id: %s%n", metricFeedback.getId());
* System.out.printf("Data Feed Metric feedback associated dimension filter: %s%n",
* metricFeedback.getDimensionFilter().asMap());
* System.out.printf("Data Feed Metric feedback created time %s%n", metricFeedback.getCreatedTime());
*
* if (PERIOD.equals(metricFeedback.getFeedbackType())) {
* MetricPeriodFeedback periodFeedback
* = (MetricPeriodFeedback) metricFeedback;
* System.out.printf("Data Feed Metric feedback type: %s%n",
* periodFeedback.getPeriodType().toString());
* System.out.printf("Data Feed Metric feedback period value: %d%n",
* periodFeedback.getPeriodValue());
* } else if (ANOMALY.equals(metricFeedback.getFeedbackType())) {
* MetricAnomalyFeedback metricAnomalyFeedback
* = (MetricAnomalyFeedback) metricFeedback;
* System.out.printf("Data Feed Metric feedback anomaly value: %s%n",
* metricAnomalyFeedback.getAnomalyValue().toString());
* System.out.printf("Data Feed Metric feedback associated detection configuration: %s%n",
* metricAnomalyFeedback.getDetectionConfigurationId());
* } else if (COMMENT.equals(metricFeedback.getFeedbackType())) {
* MetricCommentFeedback metricCommentFeedback
* = (MetricCommentFeedback) metricFeedback;
* System.out.printf("Data Feed Metric feedback comment value: %s%n",
* metricCommentFeedback.getComment());
* }
* });
*
*
*
* @param metricId the unique metric Id.
* @param options The configurable {@link ListMetricFeedbackOptions options} to pass for filtering the output
* result.
*
* @return A {@link PagedFlux} containing information of all the {@link MetricFeedback metric feedbacks} in
* the account.
     * @throws IllegalArgumentException thrown if {@code metricId} fails the UUID format validation.
* @throws MetricsAdvisorResponseException thrown if the request is rejected by server.
* @throws NullPointerException thrown if the {@code metricId} is null.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
    public PagedFlux<MetricFeedback> listFeedback(String metricId, ListMetricFeedbackOptions options) {
options = options != null ? options : new ListMetricFeedbackOptions();
try {
final MetricFeedbackFilter metricFeedbackFilter = MetricFeedbackTransforms.toInnerFilter(metricId, options);
final ListMetricFeedbackOptions finalOptions = options;
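        // Capture the options in an effectively final local so they can be referenced from the page-retrieval lambdas.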
return new PagedFlux<>(
() -> withContext(context -> listMetricFeedbacksSinglePage(metricFeedbackFilter,
finalOptions.getMaxPageSize(), finalOptions.getSkip(), context)),
continuationToken -> withContext(
context -> listMetricFeedbacksNextPage(continuationToken, metricFeedbackFilter, context)));
} catch (RuntimeException ex) {
return new PagedFlux<>(() -> monoError(logger, ex));
}
}
    private Mono<PagedResponse<MetricFeedback>> listMetricFeedbacksSinglePage(MetricFeedbackFilter metricFeedbackFilter,
Integer maxPageSize, Integer skip, Context context) {
return service.listMetricFeedbacksSinglePageAsync(metricFeedbackFilter, skip, maxPageSize, context)
.doOnRequest(ignoredValue -> logger.info("Listing information for all metric feedbacks"))
.doOnSuccess(response -> logger.info("Listed metric feedbacks - {}", response))
.doOnError(error -> logger.warning("Failed to list all metric feedbacks information", error))
.map(res -> new PagedResponseBase<>(res.getRequest(), res.getStatusCode(), res.getHeaders(),
res.getValue().stream().map(MetricFeedbackTransforms::fromInner).collect(Collectors.toList()),
res.getContinuationToken(), null));
}
    private Mono<PagedResponse<MetricFeedback>> listMetricFeedbacksNextPage(String nextPageLink,
MetricFeedbackFilter metricFeedbackFilter, Context context) {
if (CoreUtils.isNullOrEmpty(nextPageLink)) {
return Mono.empty();
}
return service.listMetricFeedbacksNextSinglePageAsync(nextPageLink, metricFeedbackFilter, context)
.doOnSubscribe(ignoredValue -> logger.info("Retrieving the next listing page - Page {}", nextPageLink))
.doOnSuccess(response -> logger.info("Retrieved the next listing page - Page {}", nextPageLink))
.doOnError(
error -> logger.warning("Failed to retrieve the next listing page - Page {}", nextPageLink, error))
.map(res -> new PagedResponseBase<>(res.getRequest(), res.getStatusCode(), res.getHeaders(),
res.getValue().stream().map(MetricFeedbackTransforms::fromInner).collect(Collectors.toList()),
res.getContinuationToken(), null));
}
}