// Source listing: org.unlaxer.Token (retrieved from a Maven repository artifact page)

package org.unlaxer;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Optional;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import java.util.stream.Stream;

import org.unlaxer.listener.OutputLevel;
import org.unlaxer.parser.Parser;
import org.unlaxer.reducer.TagBasedReducer.NodeKind;
import org.unlaxer.util.FactoryBoundCache;

/**
 * A node in the parse tree produced by a {@link Parser}.
 *
 * <p>A {@code Token} records the matched text (if any), the source {@link Range}
 * it covers, the parser that produced it, and its child tokens. Two views of the
 * children are maintained: the original list, and a filtered "AST node" list
 * containing only children whose parser is not tagged {@code NodeKind.notNode}.</p>
 */
public class Token implements Serializable{
	
	private static final long serialVersionUID = -2232289508694932061L;

	/** Cache of empty {@link RangedString}s, keyed by source position. */
	static final FactoryBoundCache<Integer, RangedString> empties = 
			new FactoryBoundCache<>(RangedString::new);
	
	/** Cached display string per token, used by {@link #toString()}. */
	static final FactoryBoundCache<Token, String> displayString = 
			new FactoryBoundCache<>(
					token -> TokenPrinter.get(token, 0, OutputLevel.detail, false));

	/** Matched text; empty when this token matched no characters. */
	public final Optional<String> tokenString;
	/** The parser that produced this token. */
	public final Parser parser;
	/** Source range covered by this token. */
	public final Range tokenRange;
	
	/** Parent token; empty for the root. Assigned by the parent's constructor. */
	public Optional<Token> parent;
	private final List<Token> originalChildren;
	//TODO make private and rename astNodeChildren
	public final List<Token> filteredChildren; // astNodeChildren 

	/** Selects which child list {@link #children(ChildrenKind)} returns. */
	public enum ChildrenKind{
		original,
		astNodes
	}
	
	public final TokenKind tokenKind;
	
	/**
	 * A token is an AST node unless its parser carries the
	 * {@code NodeKind.notNode} tag. The lambda is stateless, so it is shared
	 * as a single constant rather than allocated per instance.
	 */
	// TODO too specialized...?
	static final Predicate<Token> AST_NODES =
			token -> false == token.parser.hasTag(NodeKind.notNode.getTag());

	/** Creates a leaf token (no children). */
	public Token(TokenKind tokenKind , RangedString token, Parser parser) {
		this(tokenKind , token , parser , new ArrayList<>());
	}
	
	/**
	 * Creates a composite token spanning the given children.
	 *
	 * @param position fallback position used when {@code tokens} is empty
	 */
	public Token(TokenKind tokenKind , List<Token> tokens , Parser parser , int position) {
		this(tokenKind , 
			createRangedString(tokens , position),
			parser,
			tokens);
	}
	
	/**
	 * Primary constructor: stores state, wires each child's {@link #parent}
	 * link to this token, and computes the filtered AST-node child view.
	 */
	public Token(TokenKind tokenKind , RangedString token, Parser parser , List<Token> children) {
		super();
		this.tokenKind = tokenKind;
		this.tokenString = token.token;
		this.tokenRange = token.range;
		this.parser = parser;
		this.originalChildren = children;
		parent = Optional.empty();
		children.forEach(child -> child.parent = Optional.of(this));
		this.filteredChildren = children.stream()
			.filter(AST_NODES)
			.collect(Collectors.toList());
	}
	
	/** Returns an empty (zero-length) token at {@code position}. */
	public static Token empty(TokenKind tokenKind , int position , Parser parser){
		return new Token(tokenKind , empties.get(position), parser);
	}
	
	public Optional<String> getToken() {
		return tokenString;
	}
	
	public Range getTokenRange() {
		return tokenRange;
	}
	
	public RangedString getRangedString(){
		return new RangedString(tokenRange , tokenString);
	}
	
	public Parser getParser(){
		return parser;
	}

	/**
	 * Concatenates the children's matched text (absent texts are skipped) and
	 * merges their ranges. An empty list yields an empty RangedString at
	 * {@code position}.
	 */
	static RangedString createRangedString(List<Token> tokens, int position){
		
		if(tokens.isEmpty()){
			return new RangedString(position);
		}
		
		Optional<String> token = Optional.of(
			tokens.stream()
				.map(Token::getToken)
				.filter(Optional::isPresent)
				.map(Optional::get)
				.collect(Collectors.joining()));
		
		int startIndex = tokens.get(0).tokenRange.startIndexInclusive;
		int endIndex = tokens.get(tokens.size() - 1).tokenRange.endIndexExclusive;
		return new RangedString(new Range(startIndex, endIndex), token);
	}
	
	/** Returns this token and all descendants, depth-first pre-order. */
	public List<Token> flatten(){
		List<Token> list = new ArrayList<>();
		list.add(this);
		for(Token child : originalChildren){
			list.addAll(child.flatten());
		}
		return list;
	}
	
	@Override
	public String toString() {
		return displayString.get(this);
	}
	
	public boolean isTerminalSymbol(){
		return parser.forTerminalSymbol();
	}

	public TokenKind getTokenKind() {
		return tokenKind;
	}
	
	/**
	 * Returns a copy of this leaf token bound to {@code replace}.
	 *
	 * @throws IllegalArgumentException if this token has children
	 */
	public Token newWithReplacedParser(Parser replace){
		if(false == originalChildren.isEmpty()){
			throw new IllegalArgumentException("not support collected token");
		}
		return new Token(tokenKind, new RangedString(tokenRange, tokenString), replace);
	}
	
	/** Returns the child list selected by {@code kind}. */
	List<Token> children(ChildrenKind kind){
		return kind == ChildrenKind.astNodes ? 
				filteredChildren :
				originalChildren;
	}
	
	/** Creates a token with the same kind/parser but the given children. */
	public Token newCreatesOf(List<Token> newChildrens) {
		return new Token(tokenKind , newChildrens , parser , tokenRange.startIndexInclusive);
	}
	
	public Token newCreatesOf(Token... newChildrens) {
		return newCreatesOf(Arrays.asList(newChildrens));
	}
	
	/**
	 * First original child matching {@code predicates}.
	 *
	 * @throws java.util.NoSuchElementException if no child matches
	 */
	public Token getChild(Predicate<Token> predicates) {
		return originalChildren.stream().filter(predicates).findFirst().orElseThrow();
	}
	
	/** Index (in the AST-node children) of the first matching child. */
	public int getChildIndex(Predicate<Token> predicates) {
		return getChildIndex(ChildrenKind.astNodes, predicates);
	}

	/**
	 * Index of the first matching child in the selected child list.
	 *
	 * @throws IllegalArgumentException if no child matches
	 */
	public int getChildIndex(ChildrenKind childrenKind, Predicate<Token> predicates) {
		
		int index = 0;
		for (Token token : children(childrenKind)) {
			if(predicates.test(token)) {
				return index;
			}
			index++;
		}
		throw new IllegalArgumentException("predicates is not match");
	}
	
	/**
	 * First original child whose parser matches.
	 *
	 * @throws java.util.NoSuchElementException if no child matches
	 */
	public Token getChildWithParser(Predicate<Parser> predicatesWithTokensParser) {
		return originalChildren.stream()
				.filter(token -> predicatesWithTokensParser.test(token.parser))
				.findFirst().orElseThrow();
	}
	
	public int getChildIndexWithParser(Predicate<Parser> predicatesWithTokensParser) {
		return getChildIndexWithParser(ChildrenKind.astNodes, predicatesWithTokensParser);
	}
	
	/**
	 * Index of the first child whose parser matches, in the selected list.
	 *
	 * @throws IllegalArgumentException if no child matches
	 */
	public int getChildIndexWithParser(ChildrenKind childrenKind, Predicate<Parser> predicatesWithTokensParser) {
		
		int index = 0;
		for (Token token : children(childrenKind)) {
			if(predicatesWithTokensParser.test(token.parser)) {
				return index;
			}
			index++;
		}
		throw new IllegalArgumentException("predicates is not match");
	}

	public Token getChildWithParser(Class<? extends Parser> parserClass) {
		return getChildWithParser(parser -> parser.getClass() == parserClass);
	}
	
	public int getChildIndexWithParser(Class<? extends Parser> parserClass) {
		return getChildIndexWithParser(ChildrenKind.astNodes, parserClass);
	}
	
	public int getChildIndexWithParser(ChildrenKind childrenKind, Class<? extends Parser> parserClass) {
		return getChildIndexWithParser(childrenKind , parser -> parser.getClass() == parserClass);
	}
	
	public Optional<Token> getChildAsOptional(Predicate<Token> predicates) {
		return originalChildren.stream().filter(predicates).findFirst();
	}
	
	public Optional<Token> getChildWithParserAsOptional(Predicate<Parser> predicatesWithTokensParser) {
		return originalChildren.stream()
				.filter(token -> predicatesWithTokensParser.test(token.parser))
				.findFirst();
	}
	
	public Optional<Token> getChildWithParserAsOptional(Class<? extends Parser> parserClass) {
		return getChildWithParserAsOptional(parser -> parser.getClass() == parserClass);
	}
	
	public Stream<Token> getChildren(Predicate<Token> predicates) {
		return originalChildren.stream().filter(predicates);
	}
	
	public Stream<Token> getChildrenWithParser(Predicate<Parser> predicatesWithTokensParser) {
		return originalChildren.stream()
				.filter(token -> predicatesWithTokensParser.test(token.parser));
	}
	
	public Stream<Token> getChildrenWithParser(Class<? extends Parser> parserClass) {
		return getChildrenWithParser(parser -> parser.getClass() == parserClass);
	}
	
	public List<Token> getChildrenAsList(Predicate<Token> predicates) {
		return getChildren(predicates).collect(Collectors.toList());
	}
	
	public List<Token> getChildrenWithParserAsList(Predicate<Parser> predicatesWithTokensParser) {
		return getChildrenWithParser(predicatesWithTokensParser).collect(Collectors.toList());
	}
	
	public List<Token> getChildrenWithParserAsList(Class<? extends Parser> parserClass) {
		return getChildrenWithParserAsList(parser -> parser.getClass() == parserClass);
	}
	
	public Token getChildFromOriginal(int index) {
		return originalChildren.get(index);
	}
	
	public Token getChildFromAstNodes(int index) {
		return filteredChildren.get(index);
	}

	public List<Token> getOriginalChildren() {
		return originalChildren;
	}

	public List<Token> getAstNodeChildren() {
		return filteredChildren;
	}
	
	/**
	 * Appends children to both views, applying the AST-node filter, and wires
	 * each child's {@link #parent} link (matching the constructor's behavior;
	 * the original omitted the parent link here, leaving added children
	 * orphaned).
	 *
	 * @return this token, for chaining
	 */
	public Token addChildren(Token...tokens) {
		
		for (Token token : tokens) {
			token.parent = Optional.of(this);
			originalChildren.add(token);
			if(AST_NODES.test(token)) {
				filteredChildren.add(token);
			}
		}
		return this;
	}
}




// © 2015 - 2025 Weber Informatics LLC | Privacy Policy