/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
 */

package org.elasticsearch.hadoop.rest;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.elasticsearch.hadoop.EsHadoopException;
import org.elasticsearch.hadoop.EsHadoopIllegalArgumentException;
import org.elasticsearch.hadoop.EsHadoopIllegalStateException;
import org.elasticsearch.hadoop.cfg.ConfigurationOptions;
import org.elasticsearch.hadoop.cfg.Settings;
import org.elasticsearch.hadoop.rest.Request.Method;
import org.elasticsearch.hadoop.rest.query.QueryBuilder;
import org.elasticsearch.hadoop.rest.stats.Stats;
import org.elasticsearch.hadoop.rest.stats.StatsAware;
import org.elasticsearch.hadoop.security.EsToken;
import org.elasticsearch.hadoop.serialization.ParsingUtils;
import org.elasticsearch.hadoop.serialization.dto.NodeInfo;
import org.elasticsearch.hadoop.serialization.dto.mapping.FieldParser;
import org.elasticsearch.hadoop.serialization.dto.mapping.MappingSet;
import org.elasticsearch.hadoop.serialization.json.JacksonJsonGenerator;
import org.elasticsearch.hadoop.serialization.json.JacksonJsonParser;
import org.elasticsearch.hadoop.serialization.json.JsonFactory;
import org.elasticsearch.hadoop.serialization.json.ObjectReader;
import org.elasticsearch.hadoop.thirdparty.codehaus.jackson.JsonParser;
import org.elasticsearch.hadoop.thirdparty.codehaus.jackson.map.DeserializationConfig;
import org.elasticsearch.hadoop.thirdparty.codehaus.jackson.map.ObjectMapper;
import org.elasticsearch.hadoop.thirdparty.codehaus.jackson.map.SerializationConfig;
import org.elasticsearch.hadoop.util.Assert;
import org.elasticsearch.hadoop.util.ByteSequence;
import org.elasticsearch.hadoop.util.BytesArray;
import org.elasticsearch.hadoop.util.ClusterInfo;
import org.elasticsearch.hadoop.util.ClusterName;
import org.elasticsearch.hadoop.util.EsMajorVersion;
import org.elasticsearch.hadoop.util.FastByteArrayOutputStream;
import org.elasticsearch.hadoop.util.IOUtils;
import org.elasticsearch.hadoop.util.ObjectUtils;
import org.elasticsearch.hadoop.util.StringUtils;
import org.elasticsearch.hadoop.util.TrackingBytesArray;
import org.elasticsearch.hadoop.util.encoding.HttpEncodingTools;
import org.elasticsearch.hadoop.util.unit.TimeValue;

import java.io.Closeable;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Map.Entry;

import static org.elasticsearch.hadoop.rest.Request.Method.DELETE;
import static org.elasticsearch.hadoop.rest.Request.Method.GET;
import static org.elasticsearch.hadoop.rest.Request.Method.HEAD;
import static org.elasticsearch.hadoop.rest.Request.Method.POST;
import static org.elasticsearch.hadoop.rest.Request.Method.PUT;
import static org.elasticsearch.hadoop.util.EsMajorVersion.V_6_X;
import static org.elasticsearch.hadoop.util.EsMajorVersion.V_7_X;
import static org.elasticsearch.hadoop.util.EsMajorVersion.V_8_X;

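/**
 * Low-level REST client used by the connector to talk to the Elasticsearch HTTP API
 * while keeping track of request statistics.
 */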
public class RestClient implements Closeable, StatsAware {
    private static final Log LOG = LogFactory.getLog(RestClient.class);
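
    // Markers used to verify that the remote endpoint is a genuine Elasticsearch
    // distribution rather than a compatible look-alike.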
    static final String ELASTIC_PRODUCT_HEADER = "X-elastic-product";
    static final String ELASTIC_PRODUCT_HEADER_VALUE = "Elasticsearch";
    static final String ELASTICSEARCH_BUILD_FLAVOR = "default";
    static final String ELASTICSEARCH_TAGLINE = "You Know, for Search";

    private NetworkClient network;
    private final ObjectMapper mapper;
    private final TimeValue scrollKeepAlive;
    private final boolean indexReadMissingAsEmpty;
    private final HttpRetryPolicy retryPolicy;
    final ClusterInfo clusterInfo;
    private final ErrorExtractor errorExtractor;
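
    // Shared Jackson mapper used to read responses as plain maps; annotation
    // processing is not needed for untyped maps and is switched off.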
    {
        mapper = new ObjectMapper();
        mapper.configure(DeserializationConfig.Feature.USE_ANNOTATIONS, false);
        mapper.configure(SerializationConfig.Feature.USE_ANNOTATIONS, false);
    }

    private final Stats stats = new Stats();
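
    /** Cluster health states, mirroring the colors reported by the _cluster/health API. */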
    public enum Health {
        RED, YELLOW, GREEN
    }
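
    /**
     * Creates a client backed by a new {@link NetworkClient} built from the given settings.
     */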
    public RestClient(Settings settings) {
        this(settings, new NetworkClient(settings));
    }

    RestClient(Settings settings, NetworkClient networkClient) {
        this.network = networkClient;
        this.scrollKeepAlive = TimeValue.timeValueMillis(settings.getScrollKeepAlive());
        this.indexReadMissingAsEmpty = settings.getIndexReadMissingAsEmpty();
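
        // Resolve the configured bulk-write retry policy, translating the
        // built-in aliases to their implementing classes.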
        String retryPolicyName = settings.getBatchWriteRetryPolicy();
        if (ConfigurationOptions.ES_BATCH_WRITE_RETRY_POLICY_SIMPLE.equals(retryPolicyName)) {
            retryPolicyName = SimpleHttpRetryPolicy.class.getName();
        }
        else if (ConfigurationOptions.ES_BATCH_WRITE_RETRY_POLICY_NONE.equals(retryPolicyName)) {
            retryPolicyName = NoHttpRetryPolicy.class.getName();
        }
        this.retryPolicy = ObjectUtils.instantiate(retryPolicyName, settings);
        // Assume that the elasticsearch major version is the latest if the version is not already present in the settings
        this.clusterInfo = settings.getClusterInfoOrUnnamedLatest();
        this.errorExtractor = new ErrorExtractor(clusterInfo.getMajorVersion());
    }
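
    /**
     * Queries the _nodes/http endpoint, returning the nodes that expose HTTP,
     * optionally restricted to client nodes only.
     */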
    public List<NodeInfo> getHttpNodes(boolean clientNodeOnly) {
        Map<String, Map<String, Object>> nodesData = get("_nodes/http", "nodes");
        List<NodeInfo> nodes = new ArrayList<NodeInfo>();
        for (Entry<String, Map<String, Object>> entry : nodesData.entrySet()) {
            NodeInfo node = new NodeInfo(entry.getKey(), entry.getValue());
            if (node.hasHttp() && (!clientNodeOnly || node.isClient())) {
                nodes.add(node);
            }
        }
        return nodes;
    }
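
    /** Returns the HTTP-enabled client nodes in the cluster. */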
    public List<NodeInfo> getHttpClientNodes() {
        return getHttpNodes(true);
    }
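
    /** Returns the HTTP-enabled data nodes in the cluster. */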
    public List<NodeInfo> getHttpDataNodes() {
        List<NodeInfo> nodes = getHttpNodes(false);
        Iterator<NodeInfo> it = nodes.iterator();
        while (it.hasNext()) {
            NodeInfo node = it.next();
            if (!node.isData()) {
                it.remove();
            }
        }
        return nodes;
    }
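
    /** Returns the HTTP-enabled ingest nodes in the cluster. */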
    public List<NodeInfo> getHttpIngestNodes() {
        List<NodeInfo> nodes = getHttpNodes(false);
        Iterator<NodeInfo> it = nodes.iterator();
        while (it.hasNext()) {
            NodeInfo nodeInfo = it.next();
            if (!nodeInfo.isIngest()) {
                it.remove();
            }
        }
        return nodes;
    }
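
    /**
     * Issues a GET request against the given path and returns the parsed response,
     * narrowed to the entry under {@code string} when one is specified.
     */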
    public <T> T get(String q, String string) {
        return parseContent(execute(GET, q), string);
    }
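
    /**
     * Reads the response stream as a JSON map, updating the stream statistics once
     * parsing finishes, and returns either the whole map or, when {@code string} is
     * non-null, only the value stored under that key.
     */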
@SuppressWarnings("unchecked")
private T parseContent(InputStream content, String string) {
Map map = Collections.emptyMap();
try {
// create parser manually to lower Jackson requirements
JsonParser jsonParser = mapper.getJsonFactory().createJsonParser(content);
try {
map = mapper.readValue(jsonParser, Map.class);
} finally {
countStreamStats(content);
}
} catch (IOException ex) {
throw new EsHadoopParsingException(ex);
}
return (T) (string != null ? map.get(string) : map);
}
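
    /** Parsed outcome of a bulk request, wrapping an iterator over the per-item responses. */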
    public static class BulkActionResponse {
        private Iterator<Map> entries;