com.backendless.persistence.WKTParser
Commons lib for both server and client Java/Android SDKs
package com.backendless.persistence;
import java.io.IOException;
import java.io.Reader;
import java.io.StreamTokenizer;
import java.io.StringReader;
import java.util.ArrayList;
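/**
 * Minimal parser for the Well-Known Text (WKT) representation of geometries.
 * Supports POINT, LINESTRING and POLYGON; the MULTI* and GEOMETRYCOLLECTION
 * branches are stubbed out below.
 * <p>
 * Illustrative usage (a sketch based on the geometry classes in this package):
 * <pre>{@code
 * WKTParser parser = new WKTParser();
 * Point point = (Point) parser.read( "POINT (30 10)" );
 * LineString line = (LineString) parser.read( "LINESTRING (30 10, 10 30, 40 40)" );
 * Polygon polygon = (Polygon) parser.read( "POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))" );
 * }</pre>
 */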
public class WKTParser
{
private static final String EMPTY = "EMPTY";
private static final String COMMA = ",";
private static final String L_PAREN = "(";
private static final String R_PAREN = ")";
private static final String NAN_SYMBOL = "NaN";
private final SpatialReferenceSystemEnum srs;
public WKTParser()
{
this( SpatialReferenceSystemEnum.DEFAULT );
}
public WKTParser( SpatialReferenceSystemEnum srs )
{
if( srs == null )
throw new IllegalArgumentException( "Spatial Reference System (SRS) cannot be null." );
this.srs = srs;
}
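/**
 * Parses a WKT string and returns the corresponding {@link Geometry},
 * or null for an EMPTY geometry; malformed input results in a {@link WKTParseException}.
 */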
public Geometry read( String wellKnownText )
{
StringReader reader = new StringReader( wellKnownText );
StreamTokenizer tokenizer = createTokenizer( reader );
String type = getNextWord( tokenizer ).toUpperCase();
try
{
return readGeometryTaggedText( tokenizer, type );
}
catch( IOException e )
{
throw new WKTParseException( e );
}
}
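// Builds a tokenizer that does not parse numbers: digits, signs and dots are word
// characters, so numeric tokens come back as words and are parsed in getNextNumber().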
private static StreamTokenizer createTokenizer( Reader reader )
{
StreamTokenizer tokenizer = new StreamTokenizer( reader );
// set tokenizer to NOT parse numbers
tokenizer.resetSyntax();
tokenizer.wordChars( 'a', 'z' );
tokenizer.wordChars( 'A', 'Z' );
tokenizer.wordChars( 128 + 32, 255 ); // extended (non-ASCII) characters
tokenizer.wordChars( '0', '9' );
tokenizer.wordChars( '-', '-' );
tokenizer.wordChars( '+', '+' );
tokenizer.wordChars( '.', '.' );
tokenizer.whitespaceChars( 0, ' ' ); // treat control characters and space (0-32) as whitespace
tokenizer.commentChar( '#' );
return tokenizer;
}
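// Returns the next token as a keyword/number word, the EMPTY keyword,
// a parenthesis or a comma; any other token is rejected.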
private static String getNextWord( StreamTokenizer tokenizer )
{
try
{
int type = tokenizer.nextToken();
switch( type )
{
case StreamTokenizer.TT_WORD:
String word = tokenizer.sval;
if( word.equalsIgnoreCase( EMPTY ) )
return EMPTY;
return word;
case '(':
return L_PAREN;
case ')':
return R_PAREN;
case ',':
return COMMA;
}
throw new WKTParseException( "Unknown type: '" + (char) type + "'" );
}
catch( IOException e )
{
throw new WKTParseException( e );
}
}
private static String tokenString( StreamTokenizer tokenizer )
{
switch( tokenizer.ttype )
{
case StreamTokenizer.TT_NUMBER:
return "";
case StreamTokenizer.TT_EOL:
return "End-of-Line";
case StreamTokenizer.TT_EOF:
return "End-of-Stream";
case StreamTokenizer.TT_WORD:
return "'" + tokenizer.sval + "'";
}
return "'" + (char) tokenizer.ttype + "'";
}
private static String getNextEmptyOrOpener( StreamTokenizer tokenizer )
{
String nextWord = getNextWord( tokenizer );
if( nextWord.equals( EMPTY ) || nextWord.equals( L_PAREN ) )
return nextWord;
throw new WKTParseException( "Expected: " + EMPTY + " or " + L_PAREN );
}
private static String getNextCloserOrComma( StreamTokenizer tokenizer )
{
String nextWord = getNextWord( tokenizer );
if( nextWord.equals( COMMA ) || nextWord.equals( R_PAREN ) )
return nextWord;
throw new WKTParseException( "Expected: " + COMMA + " or " + R_PAREN );
}
private String getNextCloser( StreamTokenizer tokenizer )
{
String nextWord = getNextWord( tokenizer );
if( nextWord.equals( R_PAREN ) )
return nextWord;
throw new WKTParseException( "Expected: " + R_PAREN );
}
private static boolean isOpenerNext( StreamTokenizer tokenizer ) throws IOException
{
int type = tokenizer.nextToken();
tokenizer.pushBack();
return type == '(';
}
private static boolean isNumberNext( StreamTokenizer tokenizer ) throws IOException
{
int type = tokenizer.nextToken();
tokenizer.pushBack();
return type == StreamTokenizer.TT_WORD;
}
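// Reads the next word token and parses it as a double; the literal "NaN"
// is accepted because the tokenizer itself does not parse numbers.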
private double getNextNumber( StreamTokenizer tokenizer ) throws IOException
{
int type = tokenizer.nextToken();
if( type == StreamTokenizer.TT_WORD )
{
if( tokenizer.sval.equalsIgnoreCase( NAN_SYMBOL ) )
return Double.NaN;
else
{
try
{
return Double.parseDouble( tokenizer.sval );
}
catch( NumberFormatException ex )
{
throw new WKTParseException( "Invalid number: " + tokenizer.sval );
}
}
}
throw new WKTParseException( "Expected: number" );
}
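// Reads a parenthesized, comma-separated list of X Y coordinates,
// or returns null if the sequence is EMPTY.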
private ArrayList<double[]> getCoordinateSequence( StreamTokenizer tokenizer, boolean tryParen ) throws IOException
{
String nextWord = getNextEmptyOrOpener( tokenizer );
if( nextWord.equals( EMPTY ) )
return null;
ArrayList<double[]> coordinates = new ArrayList<>();
do
{
coordinates.add( getCoordinate( tokenizer, tryParen ) );
}
while( getNextCloserOrComma( tokenizer ).equals( COMMA ) );
return coordinates;
}
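// Reads a single X Y pair; when tryParen is set, the pair may be wrapped
// in its own parentheses.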
private double[] getCoordinate( StreamTokenizer tokenizer, boolean tryParen ) throws IOException
{
boolean opened = tryParen && isOpenerNext( tokenizer );
if( opened )
tokenizer.nextToken(); // consume the opening parenthesis
double[] sequence = new double[ 2 ];
sequence[ 0 ] = getNextNumber( tokenizer );
sequence[ 1 ] = getNextNumber( tokenizer );
// read close token if it was opened here
if( opened )
getNextCloser( tokenizer );
return sequence;
}
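// Dispatches on the geometry keyword; only POINT, LINESTRING and POLYGON
// are currently supported, the remaining WKT types are commented out below.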
private Geometry readGeometryTaggedText( StreamTokenizer tokenizer, String type ) throws IOException
{
if( type.startsWith( Point.WKT_TYPE ) )
{
return readPointText( tokenizer );
}
else if( type.startsWith( LineString.WKT_TYPE ) )
{
return readLineStringText( tokenizer );
}
else if( type.startsWith( Polygon.WKT_TYPE ) )
{
return readPolygonText( tokenizer );
}
/*
else if (type.startsWith("MULTIPOINT")) {
return readMultiPointText(tokenizer, ordinateFlags);
}
else if (type.startsWith("MULTILINESTRING")) {
return readMultiLineStringText(tokenizer, ordinateFlags);
}
else if (type.startsWith("MULTIPOLYGON")) {
return readMultiPolygonText(tokenizer, ordinateFlags);
}
else if (type.startsWith("GEOMETRYCOLLECTION")) {
return readGeometryCollectionText(tokenizer, ordinateFlags);
}
*/
throw new WKTParseException( "Unknown geometry type: '" + type + "'" );
}
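// Reads the single coordinate of a POINT; returns null for "POINT EMPTY".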
private Point readPointText( StreamTokenizer tokenizer ) throws IOException
{
ArrayList<double[]> coordinateSequence = getCoordinateSequence( tokenizer, false );
if( coordinateSequence == null )
return null;
return new Point( this.srs ).setX( coordinateSequence.get( 0 )[ 0 ] ).setY( coordinateSequence.get( 0 )[ 1 ] );
}
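// Reads the coordinates of a LINESTRING (also used for polygon rings);
// returns null for an EMPTY coordinate list.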
private LineString readLineStringText( StreamTokenizer tokenizer ) throws IOException
{
ArrayList<double[]> coordinateSequence = getCoordinateSequence( tokenizer, false );
if( coordinateSequence == null )
return null;
ArrayList<Point> points = new ArrayList<>();
for( double[] coordinates : coordinateSequence )
points.add( new Point( srs ).setX( coordinates[ 0 ] ).setY( coordinates[ 1 ] ) );
return new LineString( points, this.srs );
}
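// Reads a POLYGON as an outer shell followed by zero or more holes,
// each ring parsed as a LINESTRING coordinate list.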
private Polygon readPolygonText( StreamTokenizer tokenizer ) throws IOException
{
String nextToken = getNextEmptyOrOpener( tokenizer );
if( nextToken.equals( EMPTY ) )
return null;
LineString shell = readLineStringText( tokenizer );
ArrayList<LineString> holes = new ArrayList<>();
nextToken = getNextCloserOrComma( tokenizer );
while( nextToken.equals( COMMA ) )
{
LineString hole = readLineStringText( tokenizer );
holes.add( hole );
nextToken = getNextCloserOrComma( tokenizer );
}
return new Polygon( shell, holes, srs );
}
/*
private MultiPoint readMultiPointText( StreamTokenizer tokenizer )
{
return null;
}
private MultiLineString readMultiLineStringText( StreamTokenizer tokenizer )
{
return null;
}
private MultiPolygon readMultiPolygonText( StreamTokenizer tokenizer )
{
return null;
}
private List readGeometryCollectionText( StreamTokenizer tokenizer )
{
return null;
}
*/
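/**
 * Thrown when the WKT input cannot be parsed.
 */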
public static class WKTParseException extends RuntimeException
{
public WKTParseException( String message )
{
super( message );
}
public WKTParseException( Throwable cause )
{
super( cause );
}
public WKTParseException( String message, Throwable cause )
{
super( message, cause );
}
}
}