/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.percolator;
import org.apache.lucene.index.PrefixCodedTerms;
import org.apache.lucene.index.Term;
import org.apache.lucene.queries.BlendedTermQuery;
import org.apache.lucene.queries.CommonTermsQuery;
import org.apache.lucene.queries.TermsQuery;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.BoostQuery;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.DisjunctionMaxQuery;
import org.apache.lucene.search.PhraseQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.SynonymQuery;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.spans.SpanFirstQuery;
import org.apache.lucene.search.spans.SpanNearQuery;
import org.apache.lucene.search.spans.SpanNotQuery;
import org.apache.lucene.search.spans.SpanOrQuery;
import org.apache.lucene.search.spans.SpanQuery;
import org.apache.lucene.search.spans.SpanTermQuery;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.logging.LoggerMessageFormat;
import org.elasticsearch.common.lucene.search.MatchNoDocsQuery;
import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.function.Function;
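
/**
 * Extracts terms from Lucene queries on behalf of the percolator, so that percolator queries which can
 * never match a document can be skipped cheaply; see {@link #analyze(Query)} for the extraction rules.
 */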
public final class QueryAnalyzer {

    private static final Map<Class<? extends Query>, Function<Query, Result>> queryProcessors;

    static {
        Map<Class<? extends Query>, Function<Query, Result>> map = new HashMap<>();
        map.put(MatchNoDocsQuery.class, matchNoDocsQuery());
        map.put(ConstantScoreQuery.class, constantScoreQuery());
        map.put(BoostQuery.class, boostQuery());
        map.put(TermQuery.class, termQuery());
        map.put(TermsQuery.class, termsQuery());
        map.put(CommonTermsQuery.class, commonTermsQuery());
        map.put(BlendedTermQuery.class, blendedTermQuery());
        map.put(PhraseQuery.class, phraseQuery());
        map.put(SpanTermQuery.class, spanTermQuery());
        map.put(SpanNearQuery.class, spanNearQuery());
        map.put(SpanOrQuery.class, spanOrQuery());
        map.put(SpanFirstQuery.class, spanFirstQuery());
        map.put(SpanNotQuery.class, spanNotQuery());
        map.put(BooleanQuery.class, booleanQuery());
        map.put(DisjunctionMaxQuery.class, disjunctionMaxQuery());
        map.put(SynonymQuery.class, synonymQuery());
        map.put(FunctionScoreQuery.class, functionScoreQuery());
        queryProcessors = Collections.unmodifiableMap(map);
    }

    private QueryAnalyzer() {
    }

    /**
     * Extracts terms from the provided query. These terms are stored with the percolator query and
     * used by the percolate query's candidate query as fields to query by. The candidate query
     * holds the terms from the document to be percolated and allows the percolate query to ignore
     * percolator queries that we know would otherwise never match.
     * <p>
     * When extracting the terms for the specified query, we can also determine if the percolator query is
     * always going to match. For example if a percolator query just contains a term query or a disjunction
     * query, then when the candidate query matches with that, we know the entire percolator query always
     * matches. This allows the percolate query to skip the expensive memory index verification step that
     * it would otherwise have to execute (for example when a percolator query contains a phrase query or a
     * conjunction query).
     * <p>
     * The query analyzer doesn't always extract all terms from the specified query. For example from a
     * boolean query with no should clauses, or from a phrase query, only the longest term is selected,
     * since those terms are likely to be the rarest. A boolean query's must_not clauses are always ignored.
     * <p>
     * Sometimes the query analyzer can't extract terms from a sub query. If that happens, query analysis
     * is stopped and an UnsupportedQueryException is thrown, so that the caller can mark this query in
     * such a way that the PercolatorQuery always verifies it with the MemoryIndex.
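     * <p>
     * For example (using a hypothetical field and value), analyzing a single term query extracts that
     * term and marks the result as verified:
     * <pre>{@code
     * Result result = QueryAnalyzer.analyze(new TermQuery(new Term("field", "value")));
     * // result.verified == true; result.terms contains the single term field:value
     * }</pre>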
     */
    public static Result analyze(Query query) {
        Class queryClass = query.getClass();
        if (queryClass.isAnonymousClass()) {
            // Sometimes queries have anonymous classes; in that case we need the direct super class
            // (for example blended term query).
            queryClass = queryClass.getSuperclass();
        }
        Function<Query, Result> queryProcessor = queryProcessors.get(queryClass);
        if (queryProcessor != null) {
            return queryProcessor.apply(query);
        } else {
            throw new UnsupportedQueryException(query);
        }
    }

    static Function<Query, Result> matchNoDocsQuery() {
        return (query -> new Result(true, Collections.emptySet()));
    }

    static Function<Query, Result> constantScoreQuery() {
        return query -> {
            Query wrappedQuery = ((ConstantScoreQuery) query).getQuery();
            return analyze(wrappedQuery);
        };
    }

    static Function<Query, Result> boostQuery() {
        return query -> {
            Query wrappedQuery = ((BoostQuery) query).getQuery();
            return analyze(wrappedQuery);
        };
    }

    static Function<Query, Result> termQuery() {
        return (query -> {
            TermQuery termQuery = (TermQuery) query;
            return new Result(true, Collections.singleton(termQuery.getTerm()));
        });
    }

    static Function<Query, Result> termsQuery() {
        return query -> {
            TermsQuery termsQuery = (TermsQuery) query;
            Set<Term> terms = new HashSet<>();
            PrefixCodedTerms.TermIterator iterator = termsQuery.getTermData().iterator();
            for (BytesRef term = iterator.next(); term != null; term = iterator.next()) {
                terms.add(new Term(iterator.field(), term));
            }
            return new Result(true, terms);
        };
    }

    static Function<Query, Result> synonymQuery() {
        return query -> {
            Set<Term> terms = new HashSet<>(((SynonymQuery) query).getTerms());
            return new Result(true, terms);
        };
    }

    static Function<Query, Result> commonTermsQuery() {
        return query -> {
            List<Term> terms = ((CommonTermsQuery) query).getTerms();
            return new Result(false, new HashSet<>(terms));
        };
    }

    static Function<Query, Result> blendedTermQuery() {
        return query -> {
            List<Term> terms = ((BlendedTermQuery) query).getTerms();
            return new Result(true, new HashSet<>(terms));
        };
    }

    static Function<Query, Result> phraseQuery() {
        return query -> {
            Term[] terms = ((PhraseQuery) query).getTerms();
            if (terms.length == 0) {
                return new Result(true, Collections.emptySet());
            }

            // the longest term is likely to be the rarest,
            // so from a performance perspective it makes sense to extract that
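            // (e.g. for a hypothetical phrase "the quick brown fox" this keeps "quick": it ties with
            // "brown" at five bytes and earlier terms win ties, since only strictly longer terms replace it)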
            Term longestTerm = terms[0];
            for (Term term : terms) {
                if (longestTerm.bytes().length < term.bytes().length) {
                    longestTerm = term;
                }
            }
            return new Result(false, Collections.singleton(longestTerm));
        };
    }

    static Function<Query, Result> spanTermQuery() {
        return query -> {
            Term term = ((SpanTermQuery) query).getTerm();
            return new Result(true, Collections.singleton(term));
        };
    }

    static Function<Query, Result> spanNearQuery() {
        return query -> {
            Set<Term> bestClauses = null;
            SpanNearQuery spanNearQuery = (SpanNearQuery) query;
            for (SpanQuery clause : spanNearQuery.getClauses()) {
                Result temp = analyze(clause);
                bestClauses = selectTermListWithTheLongestShortestTerm(temp.terms, bestClauses);
            }
            return new Result(false, bestClauses);
        };
    }

    static Function<Query, Result> spanOrQuery() {
        return query -> {
            Set<Term> terms = new HashSet<>();
            SpanOrQuery spanOrQuery = (SpanOrQuery) query;
            for (SpanQuery clause : spanOrQuery.getClauses()) {
                terms.addAll(analyze(clause).terms);
            }
            return new Result(false, terms);
        };
    }

    static Function<Query, Result> spanNotQuery() {
        return query -> {
            Result result = analyze(((SpanNotQuery) query).getInclude());
            return new Result(false, result.terms);
        };
    }

    static Function<Query, Result> spanFirstQuery() {
        return query -> {
            Result result = analyze(((SpanFirstQuery) query).getMatch());
            return new Result(false, result.terms);
        };
    }

    static Function<Query, Result> booleanQuery() {
        return query -> {
            BooleanQuery bq = (BooleanQuery) query;
            List<BooleanClause> clauses = bq.clauses();
            int minimumShouldMatch = bq.getMinimumNumberShouldMatch();
            int numRequiredClauses = 0;
            int numOptionalClauses = 0;
            int numProhibitedClauses = 0;
            for (BooleanClause clause : clauses) {
                if (clause.isRequired()) {
                    numRequiredClauses++;
                }
                if (clause.isProhibited()) {
                    numProhibitedClauses++;
                }
                if (clause.getOccur() == BooleanClause.Occur.SHOULD) {
                    numOptionalClauses++;
                }
            }
            if (numRequiredClauses > 0) {
                Set<Term> bestClause = null;
                UnsupportedQueryException uqe = null;
                for (BooleanClause clause : clauses) {
                    if (clause.isRequired() == false) {
                        // skip must_not clauses, we don't need to remember the things that do *not* match...
                        // skip should clauses, this bq has must clauses, so we don't need to remember should clauses,
                        // since they are completely optional.
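                        // (e.g. in a hypothetical query "+apple +banana -cherry orange", only the
                        // +apple and +banana clauses are analyzed here)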
                        continue;
                    }
                    Result temp;
                    try {
                        temp = analyze(clause.getQuery());
                    } catch (UnsupportedQueryException e) {
                        uqe = e;
                        continue;
                    }
                    bestClause = selectTermListWithTheLongestShortestTerm(temp.terms, bestClause);
                }
                if (bestClause != null) {
                    return new Result(false, bestClause);
                } else {
                    if (uqe != null) {
                        // we're unable to select the best clause and an exception occurred, so we bail
                        throw uqe;
                    } else {
                        // We didn't find a clause and no exception occurred, so this bq only contained MatchNoDocsQueries.
                        return new Result(true, Collections.emptySet());
                    }
                }
            } else {
                List<Query> disjunctions = new ArrayList<>(numOptionalClauses);
                for (BooleanClause clause : clauses) {
                    if (clause.getOccur() == BooleanClause.Occur.SHOULD) {
                        disjunctions.add(clause.getQuery());
                    }
                }
                return handleDisjunction(disjunctions, minimumShouldMatch, numProhibitedClauses > 0);
            }
        };
    }

    static Function<Query, Result> disjunctionMaxQuery() {
        return query -> {
            List<Query> disjuncts = ((DisjunctionMaxQuery) query).getDisjuncts();
            return handleDisjunction(disjuncts, 1, false);
        };
    }

    static Function<Query, Result> functionScoreQuery() {
        return query -> {
            FunctionScoreQuery functionScoreQuery = (FunctionScoreQuery) query;
            Result result = analyze(functionScoreQuery.getSubQuery());
            // If min_score is specified we can't guarantee upfront that this percolator query matches,
            // so in that case we set verified to false.
            // (Without min_score, a match on the extracted terms means the percolator document matches.
            // Min score filters out docs, which is different from the functions, which only influence the score.)
            boolean verified = functionScoreQuery.getMinScore() == null;
            return new Result(verified, result.terms);
        };
    }

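    // Unions the terms of all disjuncts; the result is only "verified" when matching a single clause is
    // enough (minimumShouldMatch <= 1), there are no other (e.g. prohibited) clauses, and every disjunct
    // is itself verified.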
    static Result handleDisjunction(List<Query> disjunctions, int minimumShouldMatch, boolean otherClauses) {
        boolean verified = minimumShouldMatch <= 1 && otherClauses == false;
        Set<Term> terms = new HashSet<>();
        for (Query disjunct : disjunctions) {
            Result subResult = analyze(disjunct);
            if (subResult.verified == false) {
                verified = false;
            }
            terms.addAll(subResult.terms);
        }
        return new Result(verified, terms);
    }

    static Set<Term> selectTermListWithTheLongestShortestTerm(Set<Term> terms1, Set<Term> terms2) {
        if (terms1 == null) {
            return terms2;
        } else if (terms2 == null) {
            return terms1;
        } else {
            int terms1ShortestTerm = minTermLength(terms1);
            int terms2ShortestTerm = minTermLength(terms2);
            // keep the clause with the longest terms, as those are likely to be the rarest
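            // (e.g. a hypothetical clause with terms {"abc"} is preferred over one with {"abcd", "ef"},
            // because its shortest term is three bytes versus two)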
            if (terms1ShortestTerm >= terms2ShortestTerm) {
                return terms1;
            } else {
                return terms2;
            }
        }
    }

    static int minTermLength(Set<Term> terms) {
        int min = Integer.MAX_VALUE;
        for (Term term : terms) {
            min = Math.min(min, term.bytes().length);
        }
        return min;
    }

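    /**
     * The outcome of query analysis: the extracted terms plus a flag indicating whether a match on those
     * terms guarantees that the original query matches (verified), or whether the percolate query still
     * needs to verify the match with the MemoryIndex.
     */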
    static class Result {
        final Set<Term> terms;
        final boolean verified;

        Result(boolean verified, Set<Term> terms) {
            this.terms = terms;
            this.verified = verified;
        }
    }

    /**
     * Exception indicating that no query terms, or only some of them, could be extracted from a percolator query.
     */
    static class UnsupportedQueryException extends RuntimeException {

        private final Query unsupportedQuery;

        UnsupportedQueryException(Query unsupportedQuery) {
            super(LoggerMessageFormat.format("no query terms can be extracted from query [{}]", unsupportedQuery));
            this.unsupportedQuery = unsupportedQuery;
        }

        /**
         * The actual Lucene query that was unsupported and caused this exception to be thrown.
         */
        public Query getUnsupportedQuery() {
            return unsupportedQuery;
        }
    }
}