org.apache.druid.server.ClientInfoResource
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.server;
import com.google.common.base.Function;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import com.google.common.collect.Maps;
import com.google.inject.Inject;
import com.sun.jersey.spi.container.ResourceFilters;
import org.apache.druid.client.DruidDataSource;
import org.apache.druid.client.FilteredServerInventoryView;
import org.apache.druid.client.ServerViewUtil;
import org.apache.druid.client.TimelineServerView;
import org.apache.druid.client.selector.ServerSelector;
import org.apache.druid.java.util.common.DateTimes;
import org.apache.druid.java.util.common.Intervals;
import org.apache.druid.java.util.common.JodaUtils;
import org.apache.druid.java.util.common.logger.Logger;
import org.apache.druid.query.LocatedSegmentDescriptor;
import org.apache.druid.query.TableDataSource;
import org.apache.druid.query.metadata.SegmentMetadataQueryConfig;
import org.apache.druid.server.http.security.DatasourceResourceFilter;
import org.apache.druid.server.security.AuthConfig;
import org.apache.druid.server.security.AuthorizationUtils;
import org.apache.druid.server.security.AuthorizerMapper;
import org.apache.druid.server.security.ResourceAction;
import org.apache.druid.timeline.DataSegment;
import org.apache.druid.timeline.TimelineLookup;
import org.apache.druid.timeline.TimelineObjectHolder;
import org.apache.druid.timeline.partition.PartitionHolder;
import org.joda.time.DateTime;
import org.joda.time.Interval;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.TreeMap;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
 * Resource reporting which datasources, dimensions, and metrics are served by the cluster,
 * based on the server inventory and segment timeline views injected into this resource.
 */
@Path("/druid/v2/datasources")
public class ClientInfoResource
{
  private static final Logger log = new Logger(ClientInfoResource.class);

  private static final String KEY_DIMENSIONS = "dimensions";
  private static final String KEY_METRICS = "metrics";

  private FilteredServerInventoryView serverInventoryView;
  private TimelineServerView timelineServerView;
  private SegmentMetadataQueryConfig segmentMetadataQueryConfig;
  private final AuthConfig authConfig;
  private final AuthorizerMapper authorizerMapper;

  @Inject
  public ClientInfoResource(
      FilteredServerInventoryView serverInventoryView,
      TimelineServerView timelineServerView,
      SegmentMetadataQueryConfig segmentMetadataQueryConfig,
      AuthConfig authConfig,
      AuthorizerMapper authorizerMapper
  )
  {
    this.serverInventoryView = serverInventoryView;
    this.timelineServerView = timelineServerView;
    this.segmentMetadataQueryConfig = (segmentMetadataQueryConfig == null) ?
                                      new SegmentMetadataQueryConfig() : segmentMetadataQueryConfig;
    this.authConfig = authConfig;
    this.authorizerMapper = authorizerMapper;
  }
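
  /**
   * Lists the names of all datasources in the server inventory that the caller is authorized
   * to read. A hypothetical request/response pair (host, port, and datasource names are
   * illustrative, not part of this class):
   *
   *   GET http://localhost:8082/druid/v2/datasources
   *   ["wikipedia", "koalas"]
   */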
  @GET
  @Produces(MediaType.APPLICATION_JSON)
  public Iterable<String> getDataSources(@Context final HttpServletRequest request)
  {
    Function<String, Iterable<ResourceAction>> raGenerator = datasourceName -> {
      return Collections.singletonList(AuthorizationUtils.DATASOURCE_READ_RA_GENERATOR.apply(datasourceName));
    };

    return AuthorizationUtils.filterAuthorizedResources(
        request,
        getAllDataSources(),
        raGenerator,
        authorizerMapper
    );
  }

  private Set<String> getAllDataSources()
  {
    return serverInventoryView
        .getInventory()
        .stream()
        .flatMap(server -> server.getDataSources().stream().map(DruidDataSource::getName))
        .collect(Collectors.toSet());
  }
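
  /**
   * Reports the dimensions and metrics of one datasource over the requested interval, or over
   * the configured default history when no interval is given. Without the "full" query
   * parameter the response is a single {"dimensions": [...], "metrics": [...]} map; with
   * "full" the response is built per served interval (see the servedIntervals map below).
   * A hypothetical request (host, port, and datasource name are illustrative):
   *
   *   GET http://localhost:8082/druid/v2/datasources/wikipedia?full&interval=2020-01-01/2020-02-01
   */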
  @GET
  @Path("/{dataSourceName}")
  @Produces(MediaType.APPLICATION_JSON)
  @ResourceFilters(DatasourceResourceFilter.class)
  public Map<String, Object> getDatasource(
      @PathParam("dataSourceName") String dataSourceName,
      @QueryParam("interval") String interval,
      @QueryParam("full") String full
  )
  {
    if (full == null) {
      return ImmutableMap.of(
          KEY_DIMENSIONS, getDataSourceDimensions(dataSourceName, interval),
          KEY_METRICS, getDataSourceMetrics(dataSourceName, interval)
      );
    }
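
    // "full" was requested: look up the segment timeline for this datasource and build a
    // per-interval view of what is served.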
    Interval theInterval;
    if (interval == null || interval.isEmpty()) {
      DateTime now = getCurrentTime();
      theInterval = new Interval(segmentMetadataQueryConfig.getDefaultHistory(), now);
    } else {
      theInterval = Intervals.of(interval);
    }

    final Optional<? extends TimelineLookup<String, ServerSelector>> maybeTimeline =
        timelineServerView.getTimeline((new TableDataSource(dataSourceName)).getAnalysis());
    final Optional<Iterable<TimelineObjectHolder<String, ServerSelector>>> maybeServersLookup =
        maybeTimeline.map(timeline -> timeline.lookup(theInterval));
    if (!maybeServersLookup.isPresent() || Iterables.isEmpty(maybeServersLookup.get())) {
      return Collections.emptyMap();
    }
    Map<Interval, Object> servedIntervals = new TreeMap<>(
        new Comparator<Interval>()
        {
          @Override
          public int compare(Interval o1, Interval o2)
          {
            if (o1.equals(o2) || o1.overlaps(o2)) {
              return 0;
            } else {
              return o1.isBefore(o2) ? -1 : 1;
            }
          }
        }
    );
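
    // Because the comparator treats equal or overlapping intervals as the same key, overlapping
    // served intervals collapse into a single entry of this map.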
    for (TimelineObjectHolder<String, ServerSelector> holder : maybeServersLookup.get()) {
      final Set