/*
 * NOTE(review): the five lines below are web-scraping residue (download-site
 * boilerplate) that was embedded into this source file and is not valid Java.
 * Preserved verbatim inside a comment so the file can compile:
 *
 * Please wait. This can take some minutes ...
 * Many resources are needed to download a project. Please understand that we have to compensate our server costs. Thank you in advance.
 * Project price only 1 $
 * You can buy this project and download/modify it how often you want.
 * org.hibernate.tool.ide.completion.HQLAnalyzer Maven / Gradle / Ivy
 */
package org.hibernate.tool.ide.completion;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.hibernate.grammars.hql.HqlLexer;
/**
 * The HQLAnalyzer can answer certain questions about a HQL String, such as
 * whether the caret is at a position where an entity name makes sense, and
 * which sub-queries (and their entity names) are visible from a caret
 * position.
 *
 * @author leon, [email protected]
 */
public class HQLAnalyzer {

    /**
     * Defines the HQL keywords. Based on hql.g antlr grammar in 2005 ;)
     * Kept sorted at class-init time so clients can binary-search it.
     * NOTE(review): "or" appears twice in this list; harmless for binary
     * search, but could be deduplicated.
     */
    private static String[] hqlKeywords = { "between", "class", "delete",
            "desc", "distinct", "elements", "escape", "exists", "false",
            "fetch", "from", "full", "group", "having", "in", "indices",
            "inner", "insert", "into", "is", "join", "left", "like", "new",
            "not", "null", "or", "order", "outer", "properties", "right",
            "select", "set", "some", "true", "union", "update", "versioned",
            "where", "and", "or", "as", "on", "with",
            // -- SQL tokens --
            // These aren't part of HQL, but recognized by the lexer. Could be
            // useful for having SQL in the editor..but for now we keep them out
            // "case", "end", "else", "then", "when",
            // -- EJBQL tokens --
            "both", "empty", "leading", "member", "object", "of", "trailing",
    };

    /**
     * Built-in function names. Various normal builtin functions in SQL/HQL.
     * Maybe should try and do this dynamically based on dialect or
     * sqlfunctionregistry.
     */
    private static String[] builtInFunctions = {
            // standard sql92 functions
            "substring", "locate", "trim", "length", "bit_length", "coalesce",
            "nullif", "abs", "mod", "sqrt",
            "upper",
            "lower",
            "cast",
            "extract",
            // time functions mapped to ansi extract
            "second", "minute", "hour", "day",
            "month",
            "year",
            "str",
            // misc functions - based on oracle dialect
            "sign", "acos", "asin", "atan", "cos", "cosh", "exp", "ln", "sin",
            "sinh", "stddev", "sqrt", "tan", "tanh", "variance",
            "round", "trunc", "ceil", "floor",
            "chr", "initcap", "lower", "ltrim", "rtrim", "soundex", "upper",
            "ascii", "length", "to_char", "to_date",
            "current_date", "current_time", "current_timestamp", "lastday",
            "sysday", "systimestamp", "uid", "user",
            "rowid", "rownum",
            "concat", "instr", "instrb", "lpad", "replace", "rpad", "substr",
            "substrb", "translate",
            "substring", "locate", "bit_length", "coalesce",
            "atan2", "log", "mod", "nvl", "nvl2", "power",
            "add_months", "months_between", "next_day",
            "max", "min", };

    static {
        // Sorted so getHQLKeywords()/getHQLFunctionNames() consumers may
        // binary-search the arrays.
        Arrays.sort(builtInFunctions);
        Arrays.sort(hqlKeywords);
    }

    /**
     * Creates a lexer over the first {@code end} characters of {@code chars}.
     *
     * @param chars the query text
     * @param end exclusive end offset to lex up to
     */
    protected SimpleHQLLexer getLexer(char chars[], int end) {
        return new AntlrSimpleHQLLexer(chars, end);
    }

    /** Creates a lexer over the whole character array. */
    protected SimpleHQLLexer getLexer(char chars[]) {
        return new AntlrSimpleHQLLexer(chars, chars.length);
    }

    /**
     * Returns true if the position is at a location where an entityname makes
     * sense. e.g. "from Pr| where x"
     *
     * @param query the HQL query text
     * @param cursorPosition caret offset into the query
     * @return true when an entity name is expected at the caret
     */
    public boolean shouldShowEntityNames(String query, int cursorPosition) {
        return shouldShowEntityNames(query.toCharArray(), cursorPosition);
    }

    /**
     * Array-based variant of {@link #shouldShowEntityNames(String, int)};
     * only tokens before the caret are examined.
     */
    public boolean shouldShowEntityNames(char chars[], int cursorPosition) {
        SimpleHQLLexer lexer = getLexer(chars, cursorPosition);
        int tokenId = -1;
        boolean show = false;
        while ((tokenId = lexer.nextTokenId()) != HqlLexer.EOF) {
            if ((tokenId == HqlLexer.FROM
                    || tokenId == HqlLexer.DELETE
                    || tokenId == HqlLexer.UPDATE)
                    && (lexer.getTokenOffset() + lexer.getTokenLength()) < cursorPosition) {
                show = true;
            } else if (tokenId != HqlLexer.DOT && tokenId != HqlLexer.AS
                    && tokenId != HqlLexer.COMMA && tokenId != HqlLexer.IDENTIFIER
                    && tokenId != HqlLexer.WS) {
                // Any token other than dot/as/comma/identifier/whitespace after
                // the clause keyword means we left the entity-name position.
                show = false;
            }
        }
        return show;
    }

    /**
     * Returns the sub-queries that are visible from the given caret position.
     *
     * @param chars the HQL query text
     * @param position caret offset into the query
     * @return sub-queries at or above the caret's nesting depth
     */
    public List<SubQuery> getVisibleSubQueries(char[] chars, int position) {
        SubQueryList sqList = getSubQueries(chars, position);
        List<SubQuery> visible = new ArrayList<SubQuery>();
        for (Iterator<SubQuery> iter = sqList.subQueries.iterator(); iter.hasNext();) {
            SubQuery sq = iter.next();
            // NOTE(review): since startOffset <= endOffset, the disjunction
            // (start <= pos || end >= pos) is always true, so only the depth
            // check filters here. Possibly "&&" was intended — confirm against
            // upstream before changing, as callers may rely on this behavior.
            if (sqList.caretDepth >= sq.depth
                    && (sq.startOffset <= position || sq.endOffset >= position)) {
                visible.add(sq);
            }
        }
        return visible;
    }

    /**
     * Collects the entity names of every sub-query visible from the caret.
     * The element type depends on {@code SubQuery.getEntityNames()} (not
     * visible from this file), hence the raw list.
     */
    public List getVisibleEntityNames(char[] chars, int position) {
        List<SubQuery> sqs = getVisibleSubQueries(chars, position);
        List entityReferences = new ArrayList();
        for (SubQuery sq : sqs) {
            entityReferences.addAll(sq.getEntityNames());
        }
        return entityReferences;
    }

    /**
     * Lexes the full query and partitions it into {@link SubQuery} instances,
     * one per SELECT/FROM/UPDATE/DELETE clause per parenthesis depth, also
     * recording the nesting depth at the caret {@code position}.
     */
    public SubQueryList getSubQueries(char[] query, int position) {
        SimpleHQLLexer syntax = getLexer(query);
        int numericId = -1;
        List<SubQuery> subQueries = new ArrayList<SubQuery>();
        int depth = 0;
        int caretDepth = 0;
        // Open sub-query per parenthesis depth.
        Map<Integer, SubQuery> level2SubQuery = new HashMap<Integer, SubQuery>();
        SubQuery current = null;
        while ((numericId = syntax.nextTokenId()) != HqlLexer.EOF) {
            boolean tokenAdded = false;
            if (numericId == HqlLexer.LEFT_PAREN) {
                depth++;
                if (position > syntax.getTokenOffset()) {
                    caretDepth = depth;
                }
            } else if (numericId == HqlLexer.RIGHT_PAREN) {
                SubQuery currentDepthQuery = level2SubQuery.get(Integer.valueOf(depth));
                // We check if we have a query on the current depth.
                // If yes, we'll have to close it
                if (currentDepthQuery != null && currentDepthQuery.depth == depth) {
                    currentDepthQuery.endOffset = syntax.getTokenOffset();
                    currentDepthQuery.tokenIds.add(Integer.valueOf(numericId));
                    currentDepthQuery.tokenText.add(String.valueOf(query, syntax.getTokenOffset(), syntax.getTokenLength()));
                    subQueries.add(currentDepthQuery);
                    level2SubQuery.remove(Integer.valueOf(depth));
                    tokenAdded = true;
                }
                depth--;
                if (position > syntax.getTokenOffset()) {
                    caretDepth = depth;
                }
            }
            switch (numericId) {
            case HqlLexer.FROM:
            case HqlLexer.UPDATE:
            case HqlLexer.DELETE:
            case HqlLexer.SELECT:
                // Fix: re-resolve the open sub-query for this depth instead of
                // trusting the last-created one — after a nested sub-query is
                // closed, "current" still points at the closed inner SubQuery
                // and the outer clause token would be appended to it.
                current = level2SubQuery.get(Integer.valueOf(depth));
                if (current == null) {
                    current = new SubQuery();
                    current.depth = depth;
                    current.startOffset = syntax.getTokenOffset();
                    level2SubQuery.put(Integer.valueOf(depth), current);
                }
                current.tokenIds.add(Integer.valueOf(numericId));
                current.tokenText.add(String.valueOf(query, syntax.getTokenOffset(), syntax.getTokenLength()));
                break;
            default:
                if (!tokenAdded) {
                    // Attach the token to the nearest enclosing open sub-query,
                    // searching from the current depth outwards.
                    SubQuery sq = level2SubQuery.get(Integer.valueOf(depth));
                    int i = depth;
                    while (sq == null && i >= 0) {
                        sq = level2SubQuery.get(Integer.valueOf(i--));
                    }
                    if (sq != null) {
                        sq.tokenIds.add(Integer.valueOf(numericId));
                        sq.tokenText.add(String.valueOf(query, syntax.getTokenOffset(), syntax.getTokenLength()));
                    }
                }
            }
        }
        // Close every sub-query still open at EOF.
        for (Iterator<SubQuery> iter = level2SubQuery.values().iterator(); iter.hasNext();) {
            SubQuery sq = iter.next();
            sq.endOffset = syntax.getTokenOffset() + syntax.getTokenLength();
            subQueries.add(sq);
        }
        Collections.sort(subQueries);
        SubQueryList sql = new SubQueryList();
        sql.caretDepth = caretDepth;
        sql.subQueries = subQueries;
        return sql;
    }

    /**
     * Returns reference name found from position and backwards in the array,
     * i.e. the longest run of identifier-part characters and dots immediately
     * preceding {@code position}.
     *
     * @param chars the query text
     * @param position exclusive end offset of the prefix
     * @return the prefix, possibly empty, never null
     */
    public static String getEntityNamePrefix(char[] chars, int position) {
        int start = position;
        while (start > 0) {
            char c = chars[start - 1];
            if (c == '.' || Character.isJavaIdentifierPart(c)) {
                start--;
            } else {
                break;
            }
        }
        return new String(chars, start, position - start);
    }

    /** Result holder: the sub-queries of a query plus the caret's depth. */
    public static class SubQueryList {
        // Parenthesis nesting depth at the caret position.
        int caretDepth;
        public List<SubQuery> subQueries;
    }

    /** Sorted array of HQL keywords (suitable for binary search). */
    static String[] getHQLKeywords() {
        return hqlKeywords;
    }

    /** Sorted array of built-in function names (suitable for binary search). */
    static String[] getHQLFunctionNames() {
        return builtInFunctions;
    }
}