net.datafaker.transformations.sql.SqlDialect
This library is an improved port of JavaFaker (as well as Ruby's stympy/faker gem and Perl's Data::Faker library) that generates fake data.
It's useful when you're developing a new project and need realistic-looking sample data for a showcase.
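For a quick feel of the API, here is a minimal usage sketch; the three providers shown are only a small sample of what the library offers, and every call returns a newly generated random value:

import net.datafaker.Faker;

public class FakerDemo {
    public static void main(String[] args) {
        Faker faker = new Faker();
        // A few typical providers; datafaker ships many more.
        System.out.println(faker.name().fullName());
        System.out.println(faker.address().city());
        System.out.println(faker.internet().emailAddress());
    }
}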
package net.datafaker.transformations.sql;
import java.util.function.Function;
import java.util.function.Supplier;
import static net.datafaker.transformations.Transformer.LINE_SEPARATOR;
import static net.datafaker.transformations.sql.Casing.DEFAULT_CASING;
import static net.datafaker.transformations.sql.SqlDialect.AuxiliaryConstants.DEFAULT_FIRST_ROW;
import static net.datafaker.transformations.sql.SqlDialect.AuxiliaryConstants.DEFAULT_OTHER_ROWS;
import static net.datafaker.transformations.sql.SqlTransformer.SQLKeyWords.INSERT_ALL;
import static net.datafaker.transformations.sql.SqlTransformer.SQLKeyWords.INSERT_INTO;
import static net.datafaker.transformations.sql.SqlTransformer.SQLKeyWords.INTO;
import static net.datafaker.transformations.sql.SqlTransformer.SQLKeyWords.SELECT_1_FROM_DUAL;
import static net.datafaker.transformations.sql.SqlTransformer.SQLKeyWords.VALUES;
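/**
 * SQL dialects supported by {@link SqlTransformer}.
 *
 * Each constant defines the dialect's identifier quote character, the casing applied to
 * unquoted identifiers, and the functions used to render the first row, the following rows
 * and an optional closing suffix of a multi-row (batch) INSERT statement.
 */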
public enum SqlDialect {
ANSI("`"),
BIGQUERY("`", Casing.UNCHANGED),
CALCITE("\""),
CLICKHOUSE("`"),
EXASOL("\""),
FIREBOLT("\""),
H2("\""),
INFOBRIGHT("`"),
LUCIDDB("\""),
MARIADB("`", Casing.TO_LOWER),
MSSQL("[]"),
MYSQL("`", Casing.UNCHANGED),
NETEZZA("\""),
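// Oracle has no multi-row "INSERT INTO ... VALUES (...), (...)" form, so batches are rendered
// as "INSERT ALL" with one "INTO ... VALUES ..." clause per row, closed by "SELECT 1 FROM DUAL".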
ORACLE("\"", Casing.TO_UPPER,
(columns, values, caze) -> {
final String insertAll = INSERT_ALL.getValue(caze) + System.lineSeparator() + " " + INTO.getValue(caze) + " ";
final String valuesKeyWord = " " + VALUES.getValue(caze) + " ";
return insertAll + columns.get() + valuesKeyWord + values.get();
},
(columns, values, caze) -> {
final String into = " " + INTO.getValue(caze) + " ";
final String valuesKeyWord = " " + VALUES.getValue(caze) + " ";
return into + columns.get() + valuesKeyWord + values.get();
}, caze -> LINE_SEPARATOR + SELECT_1_FROM_DUAL.getValue(caze)),
PARACCEL("\""),
PHOENIX("\""),
POSTGRES("\""),
PRESTO("\"", Casing.UNCHANGED),
REDSHIFT("\"", Casing.TO_LOWER),
SNOWFLAKE("\""),
TERADATA("\""),
VERTICA("\"", Casing.UNCHANGED);
private final String sqlQuoteIdentifier;
private final Casing unquotedCasing;
private final TriFunction<Supplier<String>, Supplier<String>, SqlTransformer.Case, String> batchFirstRow;
private final TriFunction<Supplier<String>, Supplier<String>, SqlTransformer.Case, String> batchOtherRows;
private final Function<SqlTransformer.Case, String> lastBatchRow;
private static final String DEFAULT_BEFORE_EACH_BATCH_PREFIX = " ";
SqlDialect(String sqlQuoteIdentifier, Casing casing, TriFunction<Supplier<String>, Supplier<String>, SqlTransformer.Case, String> batchFirstRow,
TriFunction<Supplier<String>, Supplier<String>, SqlTransformer.Case, String> batchOtherRows, Function<SqlTransformer.Case, String> lastBatchRow) {
this.sqlQuoteIdentifier = sqlQuoteIdentifier;
this.unquotedCasing = casing;
this.batchFirstRow = batchFirstRow;
this.batchOtherRows = batchOtherRows;
this.lastBatchRow = lastBatchRow;
}
SqlDialect(String sqlQuoteIdentifier, TriFunction<Supplier<String>, Supplier<String>, SqlTransformer.Case, String> batchFirstRow,
TriFunction<Supplier<String>, Supplier<String>, SqlTransformer.Case, String> batchOtherRows, Function<SqlTransformer.Case, String> lastBatchRow) {
this(sqlQuoteIdentifier, DEFAULT_CASING, batchFirstRow, batchOtherRows, lastBatchRow);
}
SqlDialect(String sqlQuoteIdentifier, Casing casing) {
this(sqlQuoteIdentifier, casing,
DEFAULT_FIRST_ROW,
DEFAULT_OTHER_ROWS,
s -> "");
}
SqlDialect(String sqlQuoteIdentifier) {
this(sqlQuoteIdentifier, DEFAULT_CASING);
}
public String getSqlQuoteIdentifier() {
return sqlQuoteIdentifier;
}
public Casing getUnquotedCasing() {
return unquotedCasing;
}
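// The helpers below fall back to the defaults in AuxiliaryConstants when no dialect is supplied.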
public static String getFirstRow(SqlDialect dialect, Supplier<String> input, Supplier<String> input2, SqlTransformer.Case keywordCase) {
return dialect == null
? DEFAULT_FIRST_ROW.apply(input, input2, keywordCase) : dialect.batchFirstRow.apply(input, input2, keywordCase);
}
public static String getOtherRow(SqlDialect dialect, Supplier<String> input, Supplier<String> input2, SqlTransformer.Case keywordCase) {
return dialect == null
? DEFAULT_OTHER_ROWS.apply(input, input2, keywordCase) : dialect.batchOtherRows.apply(input, input2, keywordCase);
}
public static String getLastRowSuffix(SqlDialect dialect, SqlTransformer.Case caze) {
return dialect == null ? "" : dialect.lastBatchRow.apply(caze);
}
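// Default batch rendering: the first row becomes "INSERT INTO <columns> VALUES <values>";
// every following row is just its value tuple, preceded by DEFAULT_BEFORE_EACH_BATCH_PREFIX.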
static class AuxiliaryConstants {
static final TriFunction<Supplier<String>, Supplier<String>, SqlTransformer.Case, String> DEFAULT_FIRST_ROW = (supplier, supplier2, caze) -> {
final String insertAll = INSERT_INTO.getValue(caze) + " ";
final String values = LINE_SEPARATOR + VALUES.getValue(caze) + " ";
return insertAll + supplier.get() + values + supplier2.get();
};
static final TriFunction<Supplier<String>, Supplier<String>, SqlTransformer.Case, String> DEFAULT_OTHER_ROWS =
(supplier, supplier2, caze) -> DEFAULT_BEFORE_EACH_BATCH_PREFIX + supplier2.get();
}
}
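In practice a dialect is not used on its own but handed to SqlTransformer, which renders a Schema of fields as INSERT statements quoted and cased for that dialect. The sketch below is a minimal example that assumes the builder-style SqlTransformer API and the Schema/field helpers from net.datafaker.transformations; the table and column names are illustrative:

import net.datafaker.Faker;
import net.datafaker.transformations.Schema;
import net.datafaker.transformations.sql.SqlDialect;
import net.datafaker.transformations.sql.SqlTransformer;

import static net.datafaker.transformations.Field.field;

public class SqlDialectDemo {
    public static void main(String[] args) {
        Faker faker = new Faker();

        // Two illustrative columns backed by datafaker providers.
        Schema<String, String> schema = Schema.of(
            field("first_name", () -> faker.name().firstName()),
            field("city", () -> faker.address().city()));

        // Assumed builder methods: dialect, tableName and batch size.
        SqlTransformer<String> transformer = new SqlTransformer.SqlTransformerBuilder<String>()
            .dialect(SqlDialect.POSTGRES)
            .tableName("person")
            .batch(5)
            .build();

        // With a batch size > 1 the dialect's batchFirstRow/batchOtherRows functions decide
        // how the multi-row INSERT is assembled (see ORACLE above for a non-default case).
        System.out.println(transformer.generate(schema, 10));
    }
}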