org.elasticsearch.index.search.MatchQuery

/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.search;

import org.apache.logging.log4j.LogManager;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.CachingTokenFilter;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.miscellaneous.DisableGraphAttribute;
import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
import org.apache.lucene.analysis.tokenattributes.PositionLengthAttribute;
import org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.Term;
import org.apache.lucene.queries.ExtendedCommonTermsQuery;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.FuzzyQuery;
import org.apache.lucene.search.MultiTermQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.spans.SpanMultiTermQueryWrapper;
import org.apache.lucene.search.spans.SpanNearQuery;
import org.apache.lucene.search.spans.SpanOrQuery;
import org.apache.lucene.search.spans.SpanQuery;
import org.apache.lucene.search.spans.SpanTermQuery;
import org.apache.lucene.util.QueryBuilder;
import org.apache.lucene.util.graph.GraphTokenStreamFiniteStrings;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.lucene.search.SpanBooleanQueryRewriteWithMaxClause;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.query.support.QueryParsers;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.function.Supplier;

import static org.elasticsearch.common.lucene.search.Queries.newLenientFieldQuery;
import static org.elasticsearch.common.lucene.search.Queries.newUnmappedFieldQuery;

public class MatchQuery {

    private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(LogManager.getLogger(MappedFieldType.class));

    public enum Type implements Writeable {
        /**
         * The text is analyzed and terms are added to a boolean query.
         */
        BOOLEAN(0),
        /**
         * The text is analyzed and used as a phrase query.
         */
        PHRASE(1),
        /**
         * The text is analyzed and used in a phrase query, with the last term acting as a prefix.
         */
        PHRASE_PREFIX(2);

        private final int ordinal;

        Type(int ordinal) {
            this.ordinal = ordinal;
        }

        public static Type readFromStream(StreamInput in) throws IOException {
            int ord = in.readVInt();
            for (Type type : Type.values()) {
                if (type.ordinal == ord) {
                    return type;
                }
            }
            throw new ElasticsearchException("unknown serialized type [" + ord + "]");
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            out.writeVInt(this.ordinal);
        }
    }

    public enum ZeroTermsQuery implements Writeable {
        NONE(0),
        ALL(1),
        // this is used internally to make sure that query_string and simple_query_string
        // ignore query parts whose analysis removes all tokens.
        NULL(2);

        private final int ordinal;

        ZeroTermsQuery(int ordinal) {
            this.ordinal = ordinal;
        }

        public static ZeroTermsQuery readFromStream(StreamInput in) throws IOException {
            int ord = in.readVInt();
            for (ZeroTermsQuery zeroTermsQuery : ZeroTermsQuery.values()) {
                if (zeroTermsQuery.ordinal == ord) {
                    return zeroTermsQuery;
                }
            }
            throw new ElasticsearchException("unknown serialized type [" + ord + "]");
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            out.writeVInt(this.ordinal);
        }
    }
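
    // For illustration: these options control what parse(...) returns when analysis removes every token,
    // e.g. a match query for "the" against a field whose analyzer strips stopwords. NONE yields a
    // match-no-docs query, ALL yields a match-all query, and NULL yields a null query so that callers
    // such as query_string and simple_query_string can drop the clause entirely (see zeroTermsQuery()).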

    public static final int DEFAULT_PHRASE_SLOP = 0;

    public static final boolean DEFAULT_LENIENCY = false;

    public static final ZeroTermsQuery DEFAULT_ZERO_TERMS_QUERY = ZeroTermsQuery.NONE;

    protected final QueryShardContext context;

    protected Analyzer analyzer;

    protected BooleanClause.Occur occur = BooleanClause.Occur.SHOULD;

    protected boolean enablePositionIncrements = true;

    protected int phraseSlop = DEFAULT_PHRASE_SLOP;

    protected Fuzziness fuzziness = null;

    protected int fuzzyPrefixLength = FuzzyQuery.defaultPrefixLength;

    protected int maxExpansions = FuzzyQuery.defaultMaxExpansions;

    protected SpanMultiTermQueryWrapper.SpanRewriteMethod spanRewriteMethod =
        new SpanBooleanQueryRewriteWithMaxClause(FuzzyQuery.defaultMaxExpansions, false);

    protected boolean transpositions = FuzzyQuery.defaultTranspositions;

    protected MultiTermQuery.RewriteMethod fuzzyRewriteMethod;

    protected boolean lenient = DEFAULT_LENIENCY;

    protected ZeroTermsQuery zeroTermsQuery = DEFAULT_ZERO_TERMS_QUERY;

    protected Float commonTermsCutoff = null;

    protected boolean autoGenerateSynonymsPhraseQuery = true;

    public MatchQuery(QueryShardContext context) {
        this.context = context;
    }

    public void setAnalyzer(String analyzerName) {
        this.analyzer = context.getMapperService().getIndexAnalyzers().get(analyzerName);
        if (analyzer == null) {
            throw new IllegalArgumentException("No analyzer found for [" + analyzerName + "]");
        }
    }

    public void setAnalyzer(Analyzer analyzer) {
        this.analyzer = analyzer;
    }

    public void setOccur(BooleanClause.Occur occur) {
        this.occur = occur;
    }

    public void setCommonTermsCutoff(Float cutoff) {
        this.commonTermsCutoff = cutoff;
    }

    public void setEnablePositionIncrements(boolean enablePositionIncrements) {
        this.enablePositionIncrements = enablePositionIncrements;
    }

    public void setPhraseSlop(int phraseSlop) {
        this.phraseSlop = phraseSlop;
    }

    public void setFuzziness(Fuzziness fuzziness) {
        this.fuzziness = fuzziness;
    }

    public void setFuzzyPrefixLength(int fuzzyPrefixLength) {
        this.fuzzyPrefixLength = fuzzyPrefixLength;
    }

    public void setMaxExpansions(int maxExpansions) {
        this.maxExpansions = maxExpansions;
        this.spanRewriteMethod = new SpanBooleanQueryRewriteWithMaxClause(maxExpansions, false);
    }

    public void setTranspositions(boolean transpositions) {
        this.transpositions = transpositions;
    }

    public void setFuzzyRewriteMethod(MultiTermQuery.RewriteMethod fuzzyRewriteMethod) {
        this.fuzzyRewriteMethod = fuzzyRewriteMethod;
    }

    public void setLenient(boolean lenient) {
        this.lenient = lenient;
    }

    public void setZeroTermsQuery(ZeroTermsQuery zeroTermsQuery) {
        this.zeroTermsQuery = zeroTermsQuery;
    }

    public void setAutoGenerateSynonymsPhraseQuery(boolean enabled) {
        this.autoGenerateSynonymsPhraseQuery = enabled;
    }

    public Query parse(Type type, String fieldName, Object value) throws IOException {
        final MappedFieldType fieldType = context.fieldMapper(fieldName);
        if (fieldType == null) {
            return newUnmappedFieldQuery(fieldName);
        }
        Analyzer analyzer = getAnalyzer(fieldType, type == Type.PHRASE || type == Type.PHRASE_PREFIX);
        assert analyzer != null;

        MatchQueryBuilder builder = new MatchQueryBuilder(analyzer, fieldType);

        /*
         * If a keyword analyzer is used, we know that further analysis isn't
         * needed and can immediately return a term query.
         */
        if (analyzer == Lucene.KEYWORD_ANALYZER
                && type != Type.PHRASE_PREFIX) {
            return builder.newTermQuery(new Term(fieldName, value.toString()));
        }

        return parseInternal(type, fieldName, builder, value);
    }
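
    /*
     * Usage sketch (field name and query text are illustrative): the query DSL layer drives this class
     * roughly as follows.
     *
     *   MatchQuery matchQuery = new MatchQuery(queryShardContext);
     *   matchQuery.setOccur(BooleanClause.Occur.SHOULD);
     *   matchQuery.setPhraseSlop(1);
     *   Query query = matchQuery.parse(MatchQuery.Type.PHRASE, "title", "quick brown fox");
     */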

    protected final Query parseInternal(Type type, String fieldName, MatchQueryBuilder builder, Object value) throws IOException {
        final Query query;
        switch (type) {
            case BOOLEAN:
                if (commonTermsCutoff == null) {
                    query = builder.createBooleanQuery(fieldName, value.toString(), occur);
                } else {
                    query = createCommonTermsQuery(builder, fieldName, value.toString(), occur, occur, commonTermsCutoff);
                }
                break;

            case PHRASE:
                query = builder.createPhraseQuery(fieldName, value.toString(), phraseSlop);
                break;

            case PHRASE_PREFIX:
                query = builder.createPhrasePrefixQuery(fieldName, value.toString(), phraseSlop);
                break;

            default:
                throw new IllegalStateException("No type found for [" + type + "]");
        }

        return query == null ? zeroTermsQuery() : query;
    }

    private Query createCommonTermsQuery(MatchQueryBuilder builder, String field, String queryText,
                                         Occur highFreqOccur, Occur lowFreqOccur, float maxTermFrequency) {
        Query booleanQuery = builder.createBooleanQuery(field, queryText, lowFreqOccur);
        if (booleanQuery != null && booleanQuery instanceof BooleanQuery) {
            BooleanQuery bq = (BooleanQuery) booleanQuery;
            return boolToExtendedCommonTermsQuery(bq, highFreqOccur, lowFreqOccur, maxTermFrequency);
        }
        return booleanQuery;
    }

    private Query boolToExtendedCommonTermsQuery(BooleanQuery bq,
                                                 Occur highFreqOccur,
                                                 Occur lowFreqOccur,
                                                 float maxTermFrequency) {
        ExtendedCommonTermsQuery query = new ExtendedCommonTermsQuery(highFreqOccur, lowFreqOccur, maxTermFrequency);
        for (BooleanClause clause : bq.clauses()) {
            if ((clause.getQuery() instanceof TermQuery) == false) {
                return bq;
            }
            query.add(((TermQuery) clause.getQuery()).getTerm());
        }
        return query;
    }
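
    // For illustration: given query text such as "the quick fox", the BooleanQuery of TermQuery clauses
    // built above is re-wrapped as an ExtendedCommonTermsQuery over the same terms, deferring the
    // high/low frequency split to search time with maxTermFrequency as the cutoff. If any clause is not
    // a plain TermQuery (for example a synonym expansion), the original BooleanQuery is returned as-is.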

    protected Analyzer getAnalyzer(MappedFieldType fieldType, boolean quoted) {
        if (analyzer == null) {
            return quoted ? context.getSearchQuoteAnalyzer(fieldType) : context.getSearchAnalyzer(fieldType);
        } else {
            return analyzer;
        }
    }

    protected Query zeroTermsQuery() {
        switch (zeroTermsQuery) {
            case NULL:
                return null;
            case NONE:
                return Queries.newMatchNoDocsQuery("Matching no documents because no terms present");
            case ALL:
                return Queries.newMatchAllQuery();
            default:
                throw new IllegalStateException("unknown zeroTermsQuery " + zeroTermsQuery);
        }
    }

    private boolean hasPositions(MappedFieldType fieldType) {
        return fieldType.indexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0;
    }

    class MatchQueryBuilder extends QueryBuilder {
        private final MappedFieldType fieldType;

        /**
         * Creates a new QueryBuilder using the given analyzer.
         */
        MatchQueryBuilder(Analyzer analyzer, MappedFieldType fieldType) {
            super(analyzer);
            this.fieldType = fieldType;
            if (hasPositions(fieldType)) {
                setAutoGenerateMultiTermSynonymsPhraseQuery(autoGenerateSynonymsPhraseQuery);
            } else {
                setAutoGenerateMultiTermSynonymsPhraseQuery(false);
            }
            setEnablePositionIncrements(enablePositionIncrements);
        }

        @Override
        protected Query createFieldQuery(Analyzer analyzer, BooleanClause.Occur operator, String field,
                                         String queryText, boolean quoted, int slop) {
            assert operator == BooleanClause.Occur.SHOULD || operator == BooleanClause.Occur.MUST;
            Type type = quoted ? Type.PHRASE : Type.BOOLEAN;
            return createQuery(field, queryText, type, operator, slop);
        }

        public Query createPhrasePrefixQuery(String field, String queryText, int slop) {
            return createQuery(field, queryText, Type.PHRASE_PREFIX, occur, slop);
        }

        private Query createFieldQuery(TokenStream source, Type type, BooleanClause.Occur operator, String field, int phraseSlop) {
            assert operator == BooleanClause.Occur.SHOULD || operator == BooleanClause.Occur.MUST;

            // Build an appropriate query based on the analysis chain.
            try (CachingTokenFilter stream = new CachingTokenFilter(source)) {

                TermToBytesRefAttribute termAtt = stream.getAttribute(TermToBytesRefAttribute.class);
                PositionIncrementAttribute posIncAtt = stream.addAttribute(PositionIncrementAttribute.class);
                PositionLengthAttribute posLenAtt = stream.addAttribute(PositionLengthAttribute.class);

                if (termAtt == null) {
                    return null;
                }

                // phase 1: read through the stream and assess the situation:
                // counting the number of tokens/positions and marking if we have any synonyms.

                int numTokens = 0;
                int positionCount = 0;
                boolean hasSynonyms = false;
                boolean isGraph = false;

                stream.reset();
                while (stream.incrementToken()) {
                    numTokens++;
                    int positionIncrement = posIncAtt.getPositionIncrement();
                    if (positionIncrement != 0) {
                        positionCount += positionIncrement;
                    } else {
                        hasSynonyms = true;
                    }

                    int positionLength = posLenAtt.getPositionLength();
                    if (enableGraphQueries && positionLength > 1) {
                        isGraph = true;
                    }
                }

                // phase 2: based on token count, presence of synonyms, and options
                // formulate a single term, boolean, or phrase.
                if (numTokens == 0) {
                    return null;
                } else if (numTokens == 1) {
                    // single term
                    if (type == Type.PHRASE_PREFIX) {
                        return analyzePhrasePrefix(field, stream, phraseSlop, positionCount);
                    } else {
                        return analyzeTerm(field, stream);
                    }
                } else if (isGraph) {
                    // graph
                    if (type == Type.PHRASE || type == Type.PHRASE_PREFIX) {
                        return analyzeGraphPhrase(stream, field, type, phraseSlop);
                    } else {
                        return analyzeGraphBoolean(field, stream, operator);
                    }
                } else if (type == Type.PHRASE && positionCount > 1) {
                    // phrase
                    if (hasSynonyms) {
                        // complex phrase with synonyms
                        return analyzeMultiPhrase(field, stream, phraseSlop);
                    } else {
                        // simple phrase
                        return analyzePhrase(field, stream, phraseSlop);
                    }
                } else if (type == Type.PHRASE_PREFIX) {
                    // phrase prefix
                    return analyzePhrasePrefix(field, stream, phraseSlop, positionCount);
                } else {
                    // boolean
                    if (positionCount == 1) {
                        // only one position, with synonyms
                        return analyzeBoolean(field, stream);
                    } else {
                        // complex case: multiple positions
                        return analyzeMultiBoolean(field, stream, operator);
                    }
                }
            } catch (IOException e) {
                throw new RuntimeException("Error analyzing query text", e);
            }
        }
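
        /*
         * Worked example of the decision above (analyzer behaviour is assumed):
         *
         *   "fox"                        -> 1 token                       -> analyzeTerm (or phrase prefix)
         *   "quick fox", quoted          -> 2 positions, no synonyms      -> analyzePhrase
         *   "quick fox", unquoted        -> 2 positions                   -> analyzeMultiBoolean
         *   "ny" expanded to "new york"  -> a token with positionLength 2 -> isGraph, handled by
         *                                   analyzeGraphBoolean / analyzeGraphPhrase
         */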

        private Query createQuery(String field, String queryText, Type type, BooleanClause.Occur operator, int phraseSlop) {
            // Use the analyzer to get all the tokens, and then build an appropriate
            // query based on the analysis chain.
            try (TokenStream source = analyzer.tokenStream(field, queryText)) {
                if (source.hasAttribute(DisableGraphAttribute.class)) {
                    /*
                     * A {@link TokenFilter} in this {@link TokenStream} disabled the graph analysis to avoid
                     * paths explosion. See {@link org.elasticsearch.index.analysis.ShingleTokenFilterFactory} for details.
                     */
                    setEnableGraphQueries(false);
                }
                try {
                    return createFieldQuery(source, type, operator, field, phraseSlop);
                } finally {
                    setEnableGraphQueries(true);
                }
            } catch (IOException e) {
                throw new RuntimeException("Error analyzing query text", e);
            }
        }

        private SpanQuery newSpanQuery(Term[] terms, boolean prefix) {
            if (terms.length == 1) {
                return prefix ? fieldType.spanPrefixQuery(terms[0].text(), spanRewriteMethod, context) : new SpanTermQuery(terms[0]);
            }
            SpanQuery[] spanQueries = new SpanQuery[terms.length];
            for (int i = 0; i < terms.length; i++) {
                spanQueries[i] = prefix ? fieldType.spanPrefixQuery(terms[i].text(), spanRewriteMethod, context) :
                    new SpanTermQuery(terms[i]);
            }
            return new SpanOrQuery(spanQueries);
        }

        @Override
        protected SpanQuery createSpanQuery(TokenStream in, String field) throws IOException {
            return createSpanQuery(in, field, false);
        }

        private SpanQuery createSpanQuery(TokenStream in, String field, boolean prefix) throws IOException {
            TermToBytesRefAttribute termAtt = in.getAttribute(TermToBytesRefAttribute.class);
            PositionIncrementAttribute posIncAtt = in.getAttribute(PositionIncrementAttribute.class);
            if (termAtt == null) {
                return null;
            }

            SpanNearQuery.Builder builder = new SpanNearQuery.Builder(field, true);
            Term lastTerm = null;
            while (in.incrementToken()) {
                if (posIncAtt.getPositionIncrement() > 1) {
                    builder.addGap(posIncAtt.getPositionIncrement()-1);
                }
                if (lastTerm != null) {
                    builder.addClause(new SpanTermQuery(lastTerm));
                }
                lastTerm = new Term(field, termAtt.getBytesRef());
            }
            if (lastTerm != null) {
                SpanQuery spanQuery = prefix ?
                    fieldType.spanPrefixQuery(lastTerm.text(), spanRewriteMethod, context) : new SpanTermQuery(lastTerm);
                builder.addClause(spanQuery);
            }
            SpanNearQuery query = builder.build();
            SpanQuery[] clauses = query.getClauses();
            if (clauses.length == 1) {
                return clauses[0];
            } else {
                return query;
            }
        }

        @Override
        protected Query newTermQuery(Term term) {
            Supplier<Query> querySupplier;
            if (fuzziness != null) {
                querySupplier = () -> {
                    Query query = fieldType.fuzzyQuery(term.text(), fuzziness, fuzzyPrefixLength, maxExpansions, transpositions);
                    if (query instanceof FuzzyQuery) {
                        QueryParsers.setRewriteMethod((FuzzyQuery) query, fuzzyRewriteMethod);
                    }
                    return query;
                };
            } else {
                querySupplier = () -> fieldType.termQuery(term.bytes(), context);
            }
            try {
                Query query = querySupplier.get();
                return query;
            } catch (RuntimeException e) {
                if (lenient) {
                    return newLenientFieldQuery(fieldType.name(), e);
                } else {
                    throw e;
                }
            }
        }

        @Override
        protected Query analyzePhrase(String field, TokenStream stream, int slop) throws IOException {
            try {
                checkForPositions(field);
                return fieldType.phraseQuery(stream, slop, enablePositionIncrements);
            } catch (IllegalStateException e) {
                if (lenient) {
                    return newLenientFieldQuery(field, e);
                }
                throw e;
            } catch (IllegalArgumentException e) {
                if (lenient == false) {
                    DEPRECATION_LOGGER.deprecated(e.getMessage());
                }
                return newLenientFieldQuery(field, e);
            }
        }

        @Override
        protected Query analyzeMultiPhrase(String field, TokenStream stream, int slop) throws IOException {
            try {
                checkForPositions(field);
                return fieldType.multiPhraseQuery(stream, slop, enablePositionIncrements);
            } catch (IllegalStateException e) {
                if (lenient) {
                    return newLenientFieldQuery(field, e);
                }
                throw e;
            } catch (IllegalArgumentException e) {
                if (lenient == false) {
                    DEPRECATION_LOGGER.deprecated(e.getMessage());
                }
                return newLenientFieldQuery(field, e);
            }
        }

        private Query analyzePhrasePrefix(String field, TokenStream stream, int slop, int positionCount) throws IOException {
            try {
                if (positionCount > 1) {
                    checkForPositions(field);
                }
                return fieldType.phrasePrefixQuery(stream, slop, maxExpansions);
            } catch (IllegalStateException e) {
                if (lenient) {
                    return newLenientFieldQuery(field, e);
                }
                throw e;
            } catch (IllegalArgumentException e) {
                if (lenient == false) {
                    DEPRECATION_LOGGER.deprecated(e.getMessage());
                }
                return newLenientFieldQuery(field, e);
            }
        }

        private Query analyzeGraphPhrase(TokenStream source, String field, Type type, int slop) throws IOException {
            assert type == Type.PHRASE_PREFIX || type == Type.PHRASE;

            source.reset();
            GraphTokenStreamFiniteStrings graph = new GraphTokenStreamFiniteStrings(source);
            if (phraseSlop > 0) {
                /*
                 * Creates a boolean query from the graph token stream by extracting all the finite strings from the graph
                 * and using them to create phrase queries with the appropriate slop.
                 */
                BooleanQuery.Builder builder = new BooleanQuery.Builder();
                Iterator<TokenStream> it = graph.getFiniteStrings();
                while (it.hasNext()) {
                    Query query = createFieldQuery(it.next(), type, BooleanClause.Occur.MUST, field, slop);
                    if (query != null) {
                        builder.add(query, BooleanClause.Occur.SHOULD);
                    }
                }
                return builder.build();
            }

            /*
             * Creates a span near (phrase) query from a graph token stream.
             * The articulation points of the graph are visited in order and the queries
             * created at each point are merged in the returned near query.
             */
            List<SpanQuery> clauses = new ArrayList<>();
            int[] articulationPoints = graph.articulationPoints();
            int lastState = 0;
            int maxClauseCount = BooleanQuery.getMaxClauseCount();
            for (int i = 0; i <= articulationPoints.length; i++) {
                int start = lastState;
                int end = -1;
                if (i < articulationPoints.length) {
                    end = articulationPoints[i];
                }
                lastState = end;
                final SpanQuery queryPos;
                boolean endPrefix = end == -1 && type == Type.PHRASE_PREFIX;
                if (graph.hasSidePath(start)) {
                    List<SpanQuery> queries = new ArrayList<>();
                    Iterator<TokenStream> it = graph.getFiniteStrings(start, end);
                    while (it.hasNext()) {
                        TokenStream ts = it.next();
                        SpanQuery q = createSpanQuery(ts, field, endPrefix);
                        if (q != null) {
                            if (queries.size() >= maxClauseCount) {
                                throw new BooleanQuery.TooManyClauses();
                            }
                            queries.add(q);
                        }
                    }
                    if (queries.size() > 0) {
                        queryPos = new SpanOrQuery(queries.toArray(new SpanQuery[0]));
                    } else {
                        queryPos = null;
                    }
                } else {
                    Term[] terms = graph.getTerms(field, start);
                    assert terms.length > 0;
                    if (terms.length >= maxClauseCount) {
                        throw new BooleanQuery.TooManyClauses();
                    }
                    queryPos = newSpanQuery(terms, endPrefix);
                }

                if (queryPos != null) {
                    if (clauses.size() >= maxClauseCount) {
                        throw new BooleanQuery.TooManyClauses();
                    }
                    clauses.add(queryPos);
                }
            }

            if (clauses.isEmpty()) {
                return null;
            } else if (clauses.size() == 1) {
                return clauses.get(0);
            } else {
                return new SpanNearQuery(clauses.toArray(new SpanQuery[0]), 0, true);
            }
        }
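
        /*
         * For illustration (the synonym expansion is assumed): for "ny bagel" where "ny" also expands to
         * the multi-term synonym "new york", the articulation points split the graph into two segments.
         * The side path for the first segment becomes a SpanOrQuery over span queries for "ny" and
         * "new york", the second segment becomes a SpanTermQuery for "bagel", and the segments are
         * joined in an ordered SpanNearQuery with zero slop.
         */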

        private void checkForPositions(String field) {
            if (hasPositions(fieldType) == false) {
                throw new IllegalStateException("field:[" + field + "] was indexed without position data; cannot run PhraseQuery");
            }
        }
    }
}



