// org.elasticsearch.search.dfs.AggregatedDfs
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.dfs;
import org.apache.lucene.index.Term;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.trove.ExtTObjectIntHasMap;
import org.elasticsearch.common.trove.impl.Constants;
import org.elasticsearch.common.trove.iterator.TObjectIntIterator;
import org.elasticsearch.common.trove.map.hash.TObjectIntHashMap;
import java.io.IOException;
/**
* @author kimchy (shay.banon)
*/
public class AggregatedDfs implements Streamable {

    // Per-term document frequencies aggregated across shards.
    // Typed to Term so writeTo can call key().field()/text() without casts
    // (raw types here would make it.key() an Object and fail to compile).
    private TObjectIntHashMap<Term> dfMap;

    // Aggregated maxDoc across the shards participating in the search.
    private long maxDoc;

    // Used only by readAggregatedDfs, which populates the instance via readFrom.
    private AggregatedDfs() {
    }

    /**
     * Creates an aggregated DFS result.
     *
     * @param dfMap  per-term document frequencies aggregated across shards
     * @param maxDoc aggregated maxDoc across shards
     */
    public AggregatedDfs(TObjectIntHashMap<Term> dfMap, long maxDoc) {
        this.dfMap = dfMap;
        this.maxDoc = maxDoc;
    }

    /**
     * Returns the aggregated term to document-frequency map.
     */
    public TObjectIntHashMap<Term> dfMap() {
        return dfMap;
    }

    /**
     * Returns the aggregated maxDoc across shards.
     */
    public long maxDoc() {
        return maxDoc;
    }

    /**
     * Deserializes a new {@link AggregatedDfs} from the stream.
     *
     * @throws IOException if reading from the stream fails
     */
    public static AggregatedDfs readAggregatedDfs(StreamInput in) throws IOException {
        AggregatedDfs result = new AggregatedDfs();
        result.readFrom(in);
        return result;
    }

    @Override public void readFrom(StreamInput in) throws IOException {
        int size = in.readVInt();
        // -1 is the "no entry" sentinel value returned for absent keys.
        dfMap = new ExtTObjectIntHasMap<Term>(size, Constants.DEFAULT_LOAD_FACTOR, -1);
        for (int i = 0; i < size; i++) {
            // Wire format per entry: (field, text, frequency) — must mirror writeTo.
            dfMap.put(new Term(in.readUTF(), in.readUTF()), in.readVInt());
        }
        maxDoc = in.readVLong();
    }

    @Override public void writeTo(final StreamOutput out) throws IOException {
        out.writeVInt(dfMap.size());
        for (TObjectIntIterator<Term> it = dfMap.iterator(); it.hasNext();) {
            it.advance();
            out.writeUTF(it.key().field());
            out.writeUTF(it.key().text());
            out.writeVInt(it.value());
        }
        out.writeVLong(maxDoc);
    }
}