package cdc.applic.expressions.parsing;

import java.util.ArrayList;
import java.util.List;
import java.util.function.Consumer;

import cdc.applic.expressions.SyntacticException;

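/**
 * Syntactic analyzer that walks an expression and emits a {@link ParsingEvent}
 * to a consumer for each recognized token, instead of building a syntax tree.
 * <p>
 * A minimal usage sketch (the expression text is purely illustrative; the accepted
 * syntax is defined by the tokenizer and token types of this package):
 *
 * <pre>{@code
 * List<ParsingEvent> events = Analyzer.analyze("name = value");
 * events.forEach(System.out::println);
 * }</pre>
 */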
public class Analyzer extends AbstractParser {
    public Analyzer() {
        super();
    }

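    /**
     * Analyzes an expression and passes a {@link ParsingEvent} to {@code consumer}
     * for each recognized token.
     *
     * @param expression The expression to analyze.
     * @param consumer The consumer of parsing events.
     * @throws SyntacticException When the expression is syntactically invalid.
     */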
    public void analyze(String expression,
                        Consumer<ParsingEvent> consumer) {
        tokenizer.init(expression);
        tokenizer.next(true);
        parseExpr(consumer);
        fire(consumer, tokenizer.getToken(), ParsingEvent.Info.OTHER);
        expect("analyze", true, TokenType.EPSILON);
    }

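    /**
     * Analyzes an expression and collects the generated parsing events into a list.
     *
     * @param expression The expression to analyze.
     * @return The list of parsing events generated while analyzing {@code expression}.
     */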
    public static List<ParsingEvent> analyze(String expression) {
        final List<ParsingEvent> events = new ArrayList<>();
        final Analyzer analyzer = new Analyzer();
        analyzer.analyze(expression, events::add);
        return events;
    }

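    /** Wraps {@code token} and {@code info} into a {@link ParsingEvent} and passes it to {@code consumer}. */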
    private static void fire(Consumer<ParsingEvent> consumer,
                             Token token,
                             ParsingEvent.Info info) {
        consumer.accept(new ParsingEvent(token, info));
    }

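    /** Parses: PExpr (binary-operator PExpr)*. */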
    private void parseExpr(Consumer<ParsingEvent> consumer) {
        parsePExpr(consumer);
        while (tokenizer.getTokenType().isBinary()) {
            fire(consumer, tokenizer.getToken(), ParsingEvent.Info.OTHER);
            tokenizer.next(true);
            parsePExpr(consumer);
        }
    }

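    /** Parses a parenthesized expression, a negated expression, a boolean literal, or a base expression. */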
    private void parsePExpr(Consumer<ParsingEvent> consumer) {
        final String context = "parsePExpr";
        switch (tokenizer.getTokenType()) {
        case OPEN_PAREN:
            fire(consumer, tokenizer.getToken(), ParsingEvent.Info.OTHER);
            tokenizer.next(true);
            parseExpr(consumer);
            fire(consumer, tokenizer.getToken(), ParsingEvent.Info.OTHER);
            expect(context, true, TokenType.CLOSE_PAREN);
            break;
        case NOT:
            fire(consumer, tokenizer.getToken(), ParsingEvent.Info.OTHER);
            tokenizer.next(true);
            parseExpr(consumer);
            break;
        case TRUE:
        case FALSE:
            fire(consumer, tokenizer.getToken(), ParsingEvent.Info.OTHER);
            tokenizer.next(true);
            break;
        case TEXT:
        case ESCAPED_TEXT:
            parseBaseExpr(consumer);
            break;
        default:
            throw newInvalidExpression(SyntacticException.Detail.UNEXPECTED_TOKEN,
                                       "Unexpected token: " + tokenizer.getToken() + " in: '" + tokenizer.getExpression()
                                               + "' parsePExpr");
        }
    }

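    /**
     * Parses a base expression: a possibly prefixed name followed by a comparison
     * operator and a value, an IN/NOT_IN operator and a set of items, or nothing
     * (the name is then a reference).
     */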
    private void parseBaseExpr(Consumer<ParsingEvent> consumer) {
        final String context = "parseBaseExpr";
        final Token token0 = tokenizer.getToken(); // TEXT or ESCAPED_TEXT
        tokenizer.next(true);
        final Token token1 = tokenizer.getToken();
        final Token token2;
        if (tokenizer.getTokenType() == TokenType.PATH_SEP) {
            tokenizer.next(true);
            token2 = tokenizer.getToken();
            expect(context, true, TEXT_TOKEN_TYPES);
        } else {
            token2 = null;
        }

        // Current token is just after a name (with or without prefix)
        // If token2 == null, token0 is a local name
        // Otherwise, token0 is a prefix, token1 a path separator, and token2 a local name.
        // However, we don't know yet if it is a property or a ref

        switch (tokenizer.getTokenType()) {
        case EQUAL:
        case NOT_EQUAL:
        case LESS:
        case LESS_OR_EQUAL:
        case GREATER:
        case GREATER_OR_EQUAL:
        case NOT_LESS:
        case NEITHER_LESS_NOR_EQUAL:
        case NOT_GREATER:
        case NEITHER_GREATER_NOR_EQUAL:
            if (token2 == null) {
                fire(consumer, token0, ParsingEvent.Info.PROPERTY_LOCAL_NAME);
                fire(consumer, tokenizer.getToken(), ParsingEvent.Info.OTHER);
            } else {
                fire(consumer, token0, ParsingEvent.Info.PROPERTY_PREFIX);
                fire(consumer, token1, ParsingEvent.Info.OTHER);
                fire(consumer, token2, ParsingEvent.Info.PROPERTY_LOCAL_NAME);
                fire(consumer, tokenizer.getToken(), ParsingEvent.Info.OTHER);
            }

            tokenizer.next(false);
            fire(consumer, tokenizer.getToken(), ParsingEvent.Info.VALUE);
            expect(context, true, ITEM_TOKEN_TYPES);
            break;
        case IN:
        case NOT_IN:
            if (token2 == null) {
                fire(consumer, token0, ParsingEvent.Info.PROPERTY_LOCAL_NAME);
                fire(consumer, tokenizer.getToken(), ParsingEvent.Info.OTHER);
            } else {
                fire(consumer, token0, ParsingEvent.Info.PROPERTY_PREFIX);
                fire(consumer, token1, ParsingEvent.Info.OTHER);
                fire(consumer, token2, ParsingEvent.Info.PROPERTY_LOCAL_NAME);
                fire(consumer, tokenizer.getToken(), ParsingEvent.Info.OTHER);
            }

            tokenizer.next(true);
            parseSItemSet(consumer);
            break;
        default:
            if (token2 == null) {
                fire(consumer, token0, ParsingEvent.Info.REF_LOCAL_NAME);
            } else {
                fire(consumer, token0, ParsingEvent.Info.REF_PREFIX);
                fire(consumer, token1, ParsingEvent.Info.OTHER);
                fire(consumer, token2, ParsingEvent.Info.REF_LOCAL_NAME);
            }
            break;
        }
    }

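    /**
     * Parses a set of items: either the empty set token, or a delimited list of
     * items separated by ITEMS_SEP tokens and terminated by a CLOSE_SET token.
     */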
    private void parseSItemSet(Consumer<ParsingEvent> consumer) {
        final String context = "parseSItemSet";
        final TokenType type = tokenizer.getTokenType();

        if (type == TokenType.EMPTY_SET) {
            fire(consumer, tokenizer.getToken(), ParsingEvent.Info.OTHER);
            expect(context, true, ITEM_SET_TOKEN_TYPES);
        } else {
            fire(consumer, tokenizer.getToken(), ParsingEvent.Info.OTHER);
            expect(context, false, ITEM_SET_TOKEN_TYPES);
            if (tokenizer.getTokenType().isItem()) {
                parseSItem(consumer);
            }

            while (tokenizer.getTokenType() == TokenType.ITEMS_SEP) {
                fire(consumer, tokenizer.getToken(), ParsingEvent.Info.OTHER);
                tokenizer.next(false);
                parseSItem(consumer);
            }
            fire(consumer, tokenizer.getToken(), ParsingEvent.Info.OTHER);
            expect(context, true, TokenType.CLOSE_SET);
        }
    }

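    /** Parses a single item: a value, or an INTEGER or REAL range written as low TO high. */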
    private void parseSItem(Consumer<ParsingEvent> consumer) {
        final String context = "parseSItem";
        final Token token0 = tokenizer.getToken();
        final TokenType type = tokenizer.getTokenType();
        expect(context, true, ITEM_TOKEN_TYPES);
        final Token token1 = tokenizer.getToken();
        if (type == TokenType.INTEGER && tokenizer.getTokenType() == TokenType.TO) {
            fire(consumer, token0, ParsingEvent.Info.LOW_VALUE);
            fire(consumer, token1, ParsingEvent.Info.OTHER);
            tokenizer.next(false);
            fire(consumer, tokenizer.getToken(), ParsingEvent.Info.HIGH_VALUE);
            expect(context, true, TokenType.INTEGER);
        } else if (type == TokenType.REAL && tokenizer.getTokenType() == TokenType.TO) {
            fire(consumer, token0, ParsingEvent.Info.LOW_VALUE);
            fire(consumer, token1, ParsingEvent.Info.OTHER);
            tokenizer.next(false);
            fire(consumer, tokenizer.getToken(), ParsingEvent.Info.HIGH_VALUE);
            expect(context, true, TokenType.REAL);
        } else {
            fire(consumer, token0, ParsingEvent.Info.VALUE);
        }
    }
}