package com.spikeify.aerospikeql.execute;
import com.aerospike.client.AerospikeException;
import com.aerospike.client.query.ResultSet;
import com.spikeify.aerospikeql.Definitions;
import com.spikeify.aerospikeql.parse.QueryFields;
import com.spikeify.aerospikeql.parse.fields.HavingField;
import com.spikeify.aerospikeql.parse.fields.OrderField;
import java.math.BigDecimal;
import java.util.*;
/**
* Created by roman on 17/08/15.
*
* Retrieve a List<Map<String, Object>> data structure (ResultSet and QueryDiagnostics)
*/
public class Retrieve {
private final QueryFields queryFields;
private final ResultSet rs;
private final long overallStart;
private Profile profile;
public Retrieve(QueryFields queryFields, ResultSet rs, long overallStart) {
this.queryFields = queryFields;
this.rs = rs;
this.overallStart = overallStart;
}
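/**
 * Drains the Aerospike ResultSet into a list of row maps and applies the client-side
 * post-processing steps: distinct counts, averages, HAVING filtering, ORDER BY sorting,
 * LIMIT truncation and SELECT projection. Timing and row counts are recorded in a Profile.
 */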
public List<Map<String, Object>> retrieve() {
boolean groupedResults = queryFields.getGroupList().size() > 0;
boolean orderedResults = queryFields.getOrderFields().getOrderList().size() > 0;
Map<String, Object> diagnostic = null;
List<Map<String, Object>> resultList = new ArrayList<>();
Set<String> distinctCounters = queryFields.getSelectField().getDistinctCounters();
HavingField having = queryFields.getHavingField();
//set having expression
if (queryFields.getHavingField().getFields().size() > 0)
having.setExpression(overallStart * 1000);
try {
while (rs.next()) {
Object result = rs.getObject();
if (groupedResults) { //all rows are in a single hash map
diagnostic = aggregationResultsList(result, resultList, having, queryFields.getAverages(), distinctCounters);
} else { //results come in separate hash maps. These are queries without GROUP BY statements
diagnostic = basicResultsList(result, resultList, queryFields.getAverages(), distinctCounters);
if (!orderedResults && queryFields.getLimit() == resultList.size()) {
break;
}
}
}
} catch (AerospikeException e) {
e.printStackTrace();
} finally {
if (rs != null)
rs.close();
}
if (orderedResults) {
sortElements(resultList, queryFields.getOrderFields());
}
if (diagnostic == null) { //without group by statements
diagnostic = new HashMap<>();
diagnostic.put("count", resultList.size());
}
if (queryFields.getLimit() != -1) { //limit statements
if (resultList.size() > queryFields.getLimit()) {
resultList = resultList.subList(0, queryFields.getLimit());
}
}
//remove fields that are not in the select statement and put the remaining fields in select order.
List<String> selectFields = queryFields.getSelectField().getSelectList();
if (!Definitions.isSelectAll(selectFields)) {
for (int i = 0; i < resultList.size(); i++) {
resultList.set(i, convertMapToSortedMap(resultList.get(i), queryFields.getSelectField().getAliases()));
}
}
long overallEnd = System.currentTimeMillis();
long executionTime = overallEnd - overallStart;
this.profile = new Profile(overallStart, overallEnd, executionTime, (long) resultList.size(), Long.valueOf(diagnostic.get("count").toString()), (long) queryFields.getQueriedColumns().size());
return resultList;
}
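/**
 * Handles queries without GROUP BY: every record arrives as its own map. The "sys_" entry,
 * if present, carries query diagnostics and is removed from the row before the row is collected.
 */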
@SuppressWarnings("unchecked")
private Map<String, Object> basicResultsList(Object result, List<Map<String, Object>> resultList, List<String> averageFields, Set<String> distinctCounters) {
//results come in separate hash maps. These are queries without GROUP BY statements
Map<String, Object> hm = (Map<String, Object>) result;
Map<String, Object> diagnostic = null;
if (hm.size() > 0) {
calculateDistinctCounters(hm, distinctCounters);
calculateAverages(averageFields, hm);
replaceLuaLimitValues(hm);
if (hm.containsKey("sys_")) {
diagnostic = (HashMap<String, Object>) hm.remove("sys_");
}
resultList.add(hm);
}
return diagnostic;
}
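/**
 * Handles GROUP BY queries: all groups arrive in a single map keyed by group value. Each group
 * is post-processed, filtered through the HAVING expression and collected; the "sys_" entry is
 * returned separately as diagnostics. Entries are removed from the source map as they are consumed.
 */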
@SuppressWarnings("unchecked")
private Map<String, Object> aggregationResultsList(Object result, List<Map<String, Object>> resultList, HavingField having, List<String> averageFields, Set<String> distinctCounters) {
//all rows are in a single hash map
Map<String, Map<String, Object>> hm = (Map<String, Map<String, Object>>) result;
Iterator<Map.Entry<String, Map<String, Object>>> iterator = hm.entrySet().iterator();
Map<String, Object> diagnostic = null;
while (iterator.hasNext()) {
Map.Entry<String, Map<String, Object>> entry = iterator.next();
if (entry.getKey().equals("sys_")) {
diagnostic = entry.getValue();
} else {
Map<String, Object> values = entry.getValue();
calculateDistinctCounters(values, distinctCounters);
calculateAverages(averageFields, values);
replaceLuaLimitValues(values);
if (evaluateHavingStatement(having, values)) {
resultList.add(values);
}
}
iterator.remove();
}
return diagnostic;
}
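/**
 * Projects a row onto the selected aliases in SELECT order (a LinkedHashMap preserves insertion
 * order). Aliases missing from the row are emitted with a null value.
 */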
private Map<String, Object> convertMapToSortedMap(Map<String, Object> unsortedMap, List<String> fields) {
Map<String, Object> sortedMap = new LinkedHashMap<>();
for (String field : fields) {
sortedMap.put(field, unsortedMap.get(field));
}
return sortedMap;
}
/**
* The HAVING statement is evaluated with EvalEx. This method binds each field referenced in the HAVING expression to its value from the current row.
*/
private boolean evaluateHavingStatement(HavingField having, Map<String, Object> hm) {
if (having.getFields().size() > 0) {
for (String field : having.getFields()) {
if (hm.get(field) == null) {
return false;
}
having.getExpression().and(field, new BigDecimal(hm.get(field).toString()));
}
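//EvalEx evaluates a boolean expression to 1 (true) or 0 (false)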
return having.getExpression().eval().intValue() == 1;
}
return true;
}
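/**
 * Replaces Lua sentinel values with null. Assumption: the Lua aggregation seeds min()/max()
 * accumulators with the extreme long values defined in Definitions.LuaValues, so a field that
 * still carries such a sentinel never received a real value and is reported as null instead.
 */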
private void replaceLuaLimitValues(Map<String, Object> values) {
Long minLong = Long.valueOf(Definitions.LuaValues.Max.value);
Long maxLong = Long.valueOf(Definitions.LuaValues.Min.value);
for (String subKey : values.keySet()) {
if (!subKey.equals("sys_") && values.get(subKey) != null && (values.get(subKey).equals(minLong) || values.get(subKey).equals(maxLong))) {
values.put(subKey, null);
}
}
}
/**
* Replaces a field whose value is a hash map of distinct values with the size of that map.
*/
private void calculateDistinctCounters(Map<String, Object> values, Set<String> distinctCounters) {
for (String subKey : values.keySet()) {
if (!subKey.equals("sys_") && distinctCounters.contains(subKey) && values.get(subKey) instanceof HashMap) {
values.put(subKey, (long) ((HashMap) values.get(subKey)).size());
}
}
}
/**
* calculate averages for fields
*
* @param averageFields - field names to calculate averages
* @param hm - values
*/
private void calculateAverages(List<String> averageFields, Map<String, Object> hm) {
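//Assumption: each averaged field currently holds the running sum produced by the Lua aggregation,
//while the companion "<field>_count_" entry holds the number of summed values; dividing gives the mean.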
if (averageFields.size() > 0) {
for (String fieldAvg : averageFields) {
Long counter = (Long) hm.remove(fieldAvg + "_count_");
counter = counter != 0 ? counter : 1;
hm.put(fieldAvg, (Long) hm.get(fieldAvg) * 1.0 / counter);
}
}
}
/**
* sort result list
*/
private void sortElements(List<Map<String, Object>> list, final OrderField orderField) {
final List<String> orderList = orderField.getOrderList();
Collections.sort(list, new Comparator<Map<String, Object>>() {
public int compare(Map<String, Object> one, Map<String, Object> two) {
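//Rows missing a sort key fall back to the Integer.MAX_VALUE/MIN_VALUE sentinel chosen by the sort
//direction; map/list values are treated as equal, strings and booleans compare lexicographically,
//all other values compare numerically via BigDecimal.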
for (String key : orderList) {
int sortOrder = orderField.getOrderDirection().get(key);
String first = one != null && one.containsKey(key) && one.get(key) != null ? one.get(key).toString() : sortOrder == 1 ? String.valueOf(Integer.MAX_VALUE) : String.valueOf(Integer.MIN_VALUE);
String second = two != null && two.containsKey(key) && two.get(key) != null ? two.get(key).toString() : sortOrder == 1 ? String.valueOf(Integer.MAX_VALUE) : String.valueOf(Integer.MIN_VALUE);
if (one != null && one.containsKey(key) && two != null && two.containsKey(key) && (one.get(key) instanceof Map || two.get(key) instanceof Map || one.get(key) instanceof List || two.get(key) instanceof List)) {
return 0;
} else if (one != null && one.containsKey(key) && two != null && two.containsKey(key) && (one.get(key) instanceof String || two.get(key) instanceof String || one.get(key) instanceof Boolean || two.get(key) instanceof Boolean)) {
return first.compareTo(second) * orderField.getOrderDirection().get(key);
} else if (!first.equals(second)) {
return new BigDecimal(first).compareTo(new BigDecimal(second)) * orderField.getOrderDirection().get(key);
}
}
return 0;
}
});
}
public Profile getProfile() {
return profile;
}
}
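A minimal usage sketch (hypothetical wiring: it assumes a QueryFields instance produced by the AerospikeQL parser and a ResultSet returned by an Aerospike aggregation query; runQuery itself is not part of this class):

public static List<Map<String, Object>> runQuery(QueryFields queryFields, ResultSet rs) {
    long start = System.currentTimeMillis();              // becomes overallStart for profiling
    Retrieve retrieve = new Retrieve(queryFields, rs, start);
    List<Map<String, Object>> rows = retrieve.retrieve(); // rows in SELECT order, LIMIT applied
    Profile profile = retrieve.getProfile();               // timing and row-count diagnostics
    return rows;
}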