
// Source artifact: cdc.util.rdb.tools.RdbMetaAnalyzer (Maven / Gradle / Ivy)
package cdc.util.rdb.tools;
import java.io.File;
import java.io.IOException;
import java.lang.reflect.Method;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.HashSet;
import java.util.Set;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import cdc.util.cli.AbstractMainSupport;
import cdc.util.cli.FeatureMask;
import cdc.util.cli.OptionEnum;
import cdc.util.rdb.FunctionResultType;
import cdc.util.rdb.ProcedureResultType;
import cdc.util.rdb.RdbCatalog;
import cdc.util.rdb.RdbDataType;
import cdc.util.rdb.RdbDatabase;
import cdc.util.rdb.RdbDatabase.BooleanProperty;
import cdc.util.rdb.RdbDatabase.EnumProperty;
import cdc.util.rdb.RdbDatabase.IntegerProperty;
import cdc.util.rdb.RdbDatabase.LongProperty;
import cdc.util.rdb.RdbDatabase.StringProperty;
import cdc.util.rdb.RdbDatabaseIo;
import cdc.util.rdb.RdbForeignKey;
import cdc.util.rdb.RdbForeignKeyColumn;
import cdc.util.rdb.RdbFunction;
import cdc.util.rdb.RdbFunctionColumn;
import cdc.util.rdb.RdbIndex;
import cdc.util.rdb.RdbIndexColumn;
import cdc.util.rdb.RdbIndexType;
import cdc.util.rdb.RdbPrimaryKey;
import cdc.util.rdb.RdbPrimaryKeyColumn;
import cdc.util.rdb.RdbProcedure;
import cdc.util.rdb.RdbProcedureColumn;
import cdc.util.rdb.RdbSchema;
import cdc.util.rdb.RdbTable;
import cdc.util.rdb.RdbTableColumn;
import cdc.util.rdb.RdbUserDataType;
import cdc.util.rdb.SqlDataType;
import cdc.util.rdb.YesNoUnknown;
import cdc.util.rdb.tools.RdbMetaAnalyzer.MainArgs.Feature;
/**
* Class use to analyze database metadata and produce a corresponding structure.
*
* @author Damien Carbonne
*
*/
public final class RdbMetaAnalyzer {
protected static final Logger LOGGER = LogManager.getLogger(RdbMetaAnalyzer.class);
// Parsed command line arguments driving the analysis (connection info, features, outputs).
private final MainArgs margs;
// JDBC metadata of the analyzed connection; valid only while analyzeDatabase() runs.
private final DatabaseMetaData metadata;
// In-memory model progressively populated by the analyze*() methods.
private final RdbDatabase database;
// CSV dump handler for the result set currently analyzed; null when CSV output is disabled or its creation failed.
private CsvHandler csvHandler = null;
/**
 * Arguments of the analyzer: connection settings, schema filter, outputs and
 * feature toggles.
 */
public static class MainArgs {
    /**
     * Optional boolean features used to disable parts of the analysis.
     */
    public enum Feature implements OptionEnum {
        NO_PROPERTIES("no-properties", "Do not generate data related to properties."),
        NO_DATA_TYPES("no-data-types", "Do not generate data related to data types."),
        NO_USER_DATA_TYPES("no-user-data-types", "Do not generate data related to user data types."),
        NO_ATTRIBUTES("no-attributes", "Do not generate data related to user data types attributes."),
        NO_TABLE_TYPES("no-table-types", "Do not generate data related to table types."),
        NO_TABLES("no-tables", "Do not generate data related to tables."),
        NO_COLUMNS("no-columns", "Do not generate data related to table columns."),
        NO_PRIMARY_KEYS("no-primary-keys", "Do not generate data related to primary-keys."),
        NO_FOREIGN_KEYS("no-foreign-keys", "Do not generate data related to foreign-keys."),
        NO_INDICES("no-indices", "Do not generate data related to indices."),
        NO_FUNCTIONS("no-functions", "Do not generate data related to functions"),
        NO_FUNCTION_COLUMNS("no-function-columns", "Do not generate data related to function columns"),
        NO_PROCEDURES("no-procedures", "Do not generate data related to procedures"),
        NO_PROCEDURE_COLUMNS("no-procedure-columns", "Do not generate data related to procedure columns");

        private final String name;
        private final String description;

        private Feature(String name,
                        String description) {
            this.name = name;
            this.description = description;
        }

        @Override
        public final String getName() {
            return name;
        }

        @Override
        public final String getDescription() {
            return description;
        }
    }

    /** Database url. */
    public String url;
    /** User name. */
    public String user;
    /** User password. */
    public String password;
    /** JDBC driver. */
    public String driver;
    /** Set of schemas for which data must be collected. Empty means: all schemas. */
    // Restored element type: the raw Set does not compile (getSchemaPattern() returns its elements as String).
    public final Set<String> schemas = new HashSet<>();
    /** XML output file. */
    public File xmlOutputFile;
    /** CSV output dir. */
    public File csvOutputDir;
    /** Prefix of generated CSV file names. */
    public String prefix;
    // Restored type parameter: features are MainArgs.Feature values.
    protected final FeatureMask<Feature> features = new FeatureMask<>();

    public final void setEnabled(Feature feature,
                                 boolean enabled) {
        features.setEnabled(feature, enabled);
    }

    public final boolean isEnabled(Feature feature) {
        return features.isEnabled(feature);
    }
}
/**
 * Creates the analyzer: optionally loads the JDBC driver class, opens a
 * connection and immediately analyzes its metadata into {@code database}.
 *
 * @param margs the main arguments (url, user, password, driver, features, ...).
 * @throws SQLException when the connection can not be established or metadata retrieval fails.
 */
private RdbMetaAnalyzer(MainArgs margs) throws SQLException {
    LOGGER.info("Connect to: " + margs.url + " as: " + margs.user);
    this.margs = margs;
    if (margs.driver != null) {
        try {
            LOGGER.info("Load driver: " + margs.driver);
            Class.forName(margs.driver);
        } catch (final ClassNotFoundException e) {
            // Only logged: DriverManager may still locate a registered driver for the url.
            LOGGER.error("Failed to load driver class: " + margs.driver, e);
        }
    }
    // The connection is only needed during analysis; it is closed before returning.
    try (final Connection connection = DriverManager.getConnection(margs.url, margs.user, margs.password)) {
        metadata = connection.getMetaData();
        LOGGER.info("Start Analysis");
        database = new RdbDatabase(null);
        analyzeDatabase();
    }
    LOGGER.info("Analysis finished");
}
@FunctionalInterface
private static interface Getter {
public O get(int index) throws SQLException;
}
/**
 * Returns the value produced by {@code getter} at {@code index}, or
 * {@code defaultValue} when {@code index} exceeds the actual column count
 * {@code max}. This tolerates drivers that return fewer columns than the
 * JDBC specification documents.
 * Restored the {@code <O>} type parameter lost in extraction.
 *
 * @param <O> the value type.
 * @param getter the column accessor (e.g. {@code rs::getString}).
 * @param index the 1-based column index.
 * @param max the actual column count of the result set.
 * @param defaultValue the value to return when the column is absent.
 * @return the column value or {@code defaultValue}.
 * @throws SQLException when the accessor fails.
 */
private static <O> O get(Getter<O> getter,
                         int index,
                         int max,
                         O defaultValue) throws SQLException {
    if (index <= max) {
        return getter.get(index);
    } else {
        return defaultValue;
    }
}
/**
 * Tells whether data of the named schema must be collected.
 * An empty filter set means: accept every schema.
 */
private boolean acceptsSchema(String schemaName) {
    if (margs.schemas.isEmpty()) {
        return true;
    }
    return margs.schemas.contains(schemaName);
}
/**
 * Returns the schema pattern passed to DatabaseMetaData queries: the single
 * requested schema when exactly one is configured, {@code null} (no filtering)
 * otherwise.
 */
private String getSchemaPattern() {
    return margs.schemas.size() == 1
            ? margs.schemas.iterator().next()
            : null;
}
/**
 * Runs the full analysis for the given arguments and optionally dumps the
 * resulting model as XML.
 *
 * @param margs the main arguments.
 * @return the analyzed database model.
 * @throws Exception when analysis or XML generation fails.
 */
public static RdbDatabase execute(MainArgs margs) throws Exception {
    final RdbMetaAnalyzer analyzer = new RdbMetaAnalyzer(margs);
    final RdbDatabase result = analyzer.database;
    if (margs.xmlOutputFile != null) {
        LOGGER.info("Generate " + margs.xmlOutputFile);
        RdbDatabaseIo.print(result, margs.xmlOutputFile);
    }
    return result;
}
/**
 * Runs all analysis steps, honoring the NO_* feature toggles.
 * Catalogs and schemas are always analyzed, since every other step attaches
 * its results to them. Order matters: dependent steps (columns, keys, ...)
 * run after the containers (tables, functions, procedures) they populate.
 */
private void analyzeDatabase() {
    if (!margs.isEnabled(Feature.NO_PROPERTIES)) {
        analyzeProperties();
    }
    if (!margs.isEnabled(Feature.NO_DATA_TYPES)) {
        analyzeDataTypes();
    }
    if (!margs.isEnabled(Feature.NO_TABLE_TYPES)) {
        analyzeTableTypes();
    }
    // Unconditional: containers for everything below.
    analyzeCatalogs();
    analyzeSchemas();
    if (!margs.isEnabled(Feature.NO_USER_DATA_TYPES)) {
        analyzeUserDataTypes();
        // Attributes only make sense when user data types were collected.
        if (!margs.isEnabled(Feature.NO_ATTRIBUTES)) {
            analyzeAttributes();
        }
    }
    if (!margs.isEnabled(Feature.NO_FUNCTIONS)) {
        analyzeFunctions();
        if (!margs.isEnabled(Feature.NO_FUNCTION_COLUMNS)) {
            analyzeFunctionColumns();
        }
    }
    if (!margs.isEnabled(Feature.NO_PROCEDURES)) {
        analyzeProcedures();
        if (!margs.isEnabled(Feature.NO_PROCEDURE_COLUMNS)) {
            analyzeProcedureColumns();
        }
    }
    if (!margs.isEnabled(Feature.NO_TABLES)) {
        analyzeTables();
        if (!margs.isEnabled(Feature.NO_COLUMNS)) {
            analyzeColumns();
        }
        if (!margs.isEnabled(Feature.NO_PRIMARY_KEYS)) {
            analyzePrimaryKeys();
        }
        if (!margs.isEnabled(Feature.NO_FOREIGN_KEYS)) {
            analyzeForeignKeys();
        }
    }
    // NOTE(review): indices are analyzed even when NO_TABLES is set — presumably
    // intentional, but getOrCreateIndex then has no analyzed tables to attach to; verify.
    if (!margs.isEnabled(Feature.NO_INDICES)) {
        analyzeIndices();
    }
}
private static void error(Exception e,
Enum> property) {
LOGGER.error(property.getClass().getSimpleName() + " (" + property + ") analysis failed", e);
}
private void analyzeProperties() {
LOGGER.info("Analyze Properties");
// Enum properties
for (final EnumProperty property : EnumProperty.values()) {
final String name = property.getMethodName();
try {
final Method getter = DatabaseMetaData.class.getMethod(name);
final Enum> result = (Enum>) getter.invoke(metadata);
database.setProperty(property, result);
} catch (final Exception e) {
error(e, property);
}
}
// String properties
for (final StringProperty property : StringProperty.values()) {
final String name = property.getMethodName();
try {
final Method getter = DatabaseMetaData.class.getMethod(name);
final String result = (String) getter.invoke(metadata);
database.setProperty(property, result);
} catch (final Exception e) {
error(e, property);
}
}
// Boolean properties
for (final BooleanProperty property : BooleanProperty.values()) {
final String name = property.getMethodName();
try {
final Method getter = DatabaseMetaData.class.getMethod(name);
final boolean result = (Boolean) getter.invoke(metadata);
database.setProperty(property, result);
} catch (final Exception e) {
error(e, property);
}
}
// Integer properties
for (final IntegerProperty property : IntegerProperty.values()) {
final String name = property.getMethodName();
try {
final Method getter = DatabaseMetaData.class.getMethod(name);
final int result = (Integer) getter.invoke(metadata);
database.setProperty(property, result);
} catch (final Exception e) {
error(e, property);
}
}
// Long properties
for (final LongProperty property : LongProperty.values()) {
final String name = property.getMethodName();
try {
final Method getter = DatabaseMetaData.class.getMethod(name);
final long result = (Long) getter.invoke(metadata);
database.setProperty(property, result);
} catch (final Exception e) {
error(e, property);
}
}
}
/**
 * Builds the CSV output file for a given base name, applying the configured
 * prefix (or "meta-" when none was given).
 */
private File getCsvFile(String basename) {
    final String prefix = margs.prefix == null ? "meta-" : margs.prefix;
    return new File(margs.csvOutputDir, prefix + basename + ".csv");
}
/**
 * Opens the CSV dump handler for the given base name, when CSV output is enabled.
 * On failure, {@code csvHandler} is reset to {@code null} and subsequent
 * {@link #addCsv(ResultSet)} calls become no-ops.
 *
 * @param basename the CSV file base name (without prefix and extension).
 */
private void startCsv(String basename) {
    if (margs.csvOutputDir != null) {
        // mkdirs() also creates missing parents; original ignored the mkdir() result.
        if (!margs.csvOutputDir.mkdirs() && !margs.csvOutputDir.isDirectory()) {
            LOGGER.warn("Failed to create CSV output directory: " + margs.csvOutputDir);
        }
        try {
            csvHandler = new CsvHandler(getCsvFile(basename));
        } catch (final IOException e) {
            LOGGER.catching(e);
            csvHandler = null;
        }
    }
}
/**
 * Dumps the current row of {@code rs} to the open CSV handler; no-op when CSV
 * output is disabled or the handler could not be opened.
 */
private void addCsv(ResultSet rs) {
    if (margs.csvOutputDir == null || csvHandler == null) {
        return;
    }
    try {
        csvHandler.add(rs);
    } catch (SQLException | IOException e) {
        LOGGER.catching(e);
    }
}
/**
 * Closes the current CSV handler, if any; close failures are only logged.
 */
private void stopCsv() {
    if (margs.csvOutputDir == null || csvHandler == null) {
        return;
    }
    try {
        csvHandler.close();
    } catch (final IOException e) {
        LOGGER.catching(e);
    }
}
/**
 * Supplier of a metadata result set (e.g. {@code metadata::getCatalogs}).
 */
@FunctionalInterface
private static interface ResultSetSupplier {
    public ResultSet get() throws SQLException;
}
/**
 * Supplier of a per-table metadata result set
 * (e.g. {@code metadata::getPrimaryKeys}).
 * Parameter order follows the JDBC convention: catalog, schema, table.
 */
@FunctionalInterface
private static interface TableResultSetSupplier {
    public ResultSet get(String catalog,
                         String schema,
                         String table) throws SQLException;
}
/**
 * Analyzer of one result set row.
 * {@code max} is the actual column count, used to guard against drivers
 * returning fewer columns than documented.
 */
@FunctionalInterface
private static interface ResultSetAnalyzer {
    public void analyze(ResultSet rs,
                        int max) throws SQLException;
}
/**
 * Analyzer of one row of a per-table result set; {@code table} is the model
 * object the row belongs to, {@code max} the actual column count.
 */
@FunctionalInterface
private static interface TableResultSetAnalyzer {
    public void analyze(ResultSet rs,
                        int max,
                        RdbTable table) throws SQLException;
}
/**
 * Template method: iterates over the supplied result set, dumping each row to
 * CSV (named after {@code title}) and delegating it to {@code analyzer}.
 * SQL failures abort this step only and are logged.
 *
 * @param title the human readable step title (also drives the CSV file name).
 * @param supplier the result set supplier.
 * @param analyzer the per-row analyzer.
 */
private void analyzeResultSet(String title,
                              ResultSetSupplier supplier,
                              ResultSetAnalyzer analyzer) {
    LOGGER.info("Analyze " + title);
    // Literal replacement: replace() suffices, replaceAll() would compile " " as a regex.
    final String csv = title.toLowerCase().replace(" ", "-");
    startCsv(csv);
    try (final ResultSet rs = supplier.get()) {
        final int max = rs.getMetaData().getColumnCount();
        while (rs.next()) {
            analyzer.analyze(rs, max);
        }
    } catch (final SQLException e) {
        LOGGER.error(title + " failed", e);
    }
    stopCsv();
}
/**
 * Template method: for each accepted table of the database, obtains the
 * table-specific result set from {@code supplier} and delegates each row to
 * {@code analyzer}. Rows are also dumped to CSV (named after {@code title}).
 *
 * @param title the human readable step title (also drives the CSV file name).
 * @param supplier the per-table result set supplier (catalog, schema, table order).
 * @param analyzer the per-row analyzer.
 */
private void analyzeTableResultSet(String title,
                                   TableResultSetSupplier supplier,
                                   TableResultSetAnalyzer analyzer) {
    LOGGER.info("Analyze " + title);
    // Literal replacement: replace() suffices, replaceAll() would compile " " as a regex.
    final String csv = title.toLowerCase().replace(" ", "-");
    startCsv(csv);
    try (final ResultSet trs = metadata.getTables(null, getSchemaPattern(), null, null)) {
        while (trs.next()) {
            // getTables columns: 1 = catalog, 2 = schema, 3 = table name.
            final String schemaName = trs.getString(2);
            if (acceptsSchema(schemaName)) {
                final String catalogName = trs.getString(1);
                final String tableName = trs.getString(3);
                final RdbCatalog catalog = database.getCatalog(catalogName);
                final RdbSchema schema = catalog.getSchema(schemaName);
                final RdbTable table = schema.getTable(tableName);
                // FIX: arguments were passed as (schemaName, catalogName, tableName),
                // swapping catalog and schema versus the TableResultSetSupplier contract
                // and the JDBC DatabaseMetaData (catalog, schema, table) convention.
                try (final ResultSet rs = supplier.get(catalogName, schemaName, tableName)) {
                    final int max = rs.getMetaData().getColumnCount();
                    while (rs.next()) {
                        analyzer.analyze(rs, max, table);
                    }
                }
            }
        }
    } catch (final SQLException e) {
        LOGGER.error(title + " failed", e);
    }
    stopCsv();
}
/** Collects the SQL data types supported by the database. */
private void analyzeDataTypes() {
    analyzeResultSet("Data Types",
                     () -> metadata.getTypeInfo(),
                     (rs, max) -> analyzeDataType(rs, max));
}
/**
 * Analyzes one row of {@code DatabaseMetaData.getTypeInfo()} and registers the
 * corresponding data type on the database model.
 * Creation failures are logged and do not stop the iteration.
 *
 * @param rs the result set positioned on the row to analyze.
 * @param max the actual column count of {@code rs}.
 * @throws SQLException when a column access fails.
 */
private void analyzeDataType(ResultSet rs,
                             int max) throws SQLException {
    // 1: Type name (string)
    // 2: SQL data type from java.sql.Types (int)
    // 3: maximum precision (int)
    // 4: literal prefix (string)
    // 5: literal suffix (string)
    // 6: create params (string)
    // 7: nullable (short)
    // 8: case sensitive (boolean)
    // 9: searchable (short)
    // 10: unsigned attribute (boolean)
    // 11: fixed precision scale (boolean)
    // 12: auto increment (boolean)
    // 13: local type name (string)
    // 14: minimum scale (short)
    // 15: maximum scale (short)
    // 16: sql data type (int, unused)
    // 17: sql datetime sub (int, unused)
    // 18: radix (int)
    addCsv(rs);
    final String typeName = get(rs::getString, 1, max, null);
    final int dataType = get(rs::getInt, 2, max, -1);
    final int maxPrecision = get(rs::getInt, 3, max, -1);
    final String literalPrefix = get(rs::getString, 4, max, null);
    final String literalSuffix = get(rs::getString, 5, max, null);
    final String createParams = get(rs::getString, 6, max, null);
    // Renamed from "nullabelCode" (typo).
    final short nullableCode = get(rs::getShort, 7, max, (short) -1);
    final boolean caseSensitive = get(rs::getBoolean, 8, max, false);
    final boolean unsigned = get(rs::getBoolean, 10, max, false);
    final boolean fixedPrecisionScale = get(rs::getBoolean, 11, max, false);
    final boolean autoIncrement = get(rs::getBoolean, 12, max, false);
    final String localizedName = get(rs::getString, 13, max, null);
    final short minScale = get(rs::getShort, 14, max, (short) -1);
    final short maxScale = get(rs::getShort, 15, max, (short) -1);
    final int radix = get(rs::getInt, 18, max, -1);
    // TODO other attributes
    try {
        final RdbDataType type = database.createDataType(typeName, SqlDataType.decode(dataType));
        type.setMaxPrecison(maxPrecision);
        type.setLiteralPrefix(literalPrefix);
        type.setLiteralSuffix(literalSuffix);
        type.setCreateParams(createParams);
        type.setNullable(YesNoUnknown.decode(nullableCode));
        type.setCaseSensitive(caseSensitive);
        type.setUnsigned(unsigned);
        type.setFixedPrecisionScale(fixedPrecisionScale);
        type.setAutoIncrement(autoIncrement);
        type.setLocalizedName(localizedName);
        type.setMinScale(minScale);
        type.setMaxScale(maxScale);
        type.setRadix(radix);
    } catch (final Exception e) {
        LOGGER.error("Failed to create data type", e);
    }
}
/** Collects user defined data types (UDTs) of the accepted schemas. */
private void analyzeUserDataTypes() {
    final ResultSetSupplier supplier = () -> metadata.getUDTs(null, getSchemaPattern(), null, null);
    analyzeResultSet("User Data Types", supplier, (rs, max) -> analyzeUserDataType(rs, max));
}
/**
 * Analyzes one row of {@code DatabaseMetaData.getUDTs()} and registers the
 * user data type on its schema. Creation failures are logged.
 *
 * @param rs the result set positioned on the row to analyze.
 * @param max the actual column count of {@code rs}.
 * @throws SQLException when a column access fails.
 */
private void analyzeUserDataType(ResultSet rs,
                                 int max) throws SQLException {
    // 1: type catalog (string, may be null)
    // 2: type schema (string, may be null)
    // 3: type name (string)
    // 4: class name (string)
    // 5: data type (int)
    // 6: comments (string)
    // 7: base type (short)
    addCsv(rs);
    final String schemaName = get(rs::getString, 2, max, null);
    if (acceptsSchema(schemaName)) {
        final String catalogName = get(rs::getString, 1, max, null);
        final String typeName = get(rs::getString, 3, max, null);
        final String className = get(rs::getString, 4, max, null);
        final int dataTypeCode = get(rs::getInt, 5, max, -1);
        final String comments = get(rs::getString, 6, max, null);
        // final short baseTypeCode = rs.getShort(7);
        // TODO other attributes
        try {
            // Catalog and schema are expected to exist: analyzeCatalogs()/analyzeSchemas() run first.
            final RdbCatalog catalog = database.getCatalog(catalogName);
            final RdbSchema schema = catalog.getSchema(schemaName);
            final RdbUserDataType type = schema.createUserDataType(typeName);
            type.setClassName(className);
            type.setComments(comments);
            type.setType(SqlDataType.decode(dataTypeCode));
        } catch (final Exception e) {
            LOGGER.error("Failed to create user data type", e);
        }
    }
}
/** Collects attributes of user defined data types of the accepted schemas. */
private void analyzeAttributes() {
    final ResultSetSupplier supplier = () -> metadata.getAttributes(null, getSchemaPattern(), null, null);
    analyzeResultSet("Attributes", supplier, (rs, max) -> analyzeAttribute(rs, max));
}
/**
 * Analyzes one row of {@code DatabaseMetaData.getAttributes()}.
 * Currently only dumps the row to CSV; model population is not implemented yet.
 *
 * @param rs the result set positioned on the row to analyze.
 * @param max the actual column count of {@code rs} (unused until implemented).
 * @throws SQLException when a column access fails.
 */
private void analyzeAttribute(ResultSet rs,
                              int max) throws SQLException {
    // 1: type catalog (string, may be null)
    // 2: type schema (string, may be null)
    // 3: type name (string)
    // 4: attribute name (string)
    // 5: data type (int)
    // 6: attribute type name (string)
    // 7: attribute size (int)
    // 8: decimal digits (int)
    // 9: radix (int)
    // 10: nullable (int)
    // 11: comments (string)
    // 12: attribute default value
    // 13: sql data type (unused)
    // 14: sql datetime sub (unused)
    // 15: char octet length (int)
    // 16: ordinal position (int)
    // 17: is nullable (string)
    // 18: scope catalog (string)
    // 19: scope schema (string)
    // 20: scope table (string)
    // 21: source data type (short)
    addCsv(rs);
    // TODO
}
/** Collects functions of the accepted schemas. */
private void analyzeFunctions() {
    final ResultSetSupplier supplier = () -> metadata.getFunctions(null, getSchemaPattern(), null);
    analyzeResultSet("Functions", supplier, (rs, max) -> analyzeFunction(rs, max));
}
/**
 * Analyzes one row of {@code DatabaseMetaData.getFunctions()} and registers the
 * function on its schema. Creation failures are logged.
 *
 * @param rs the result set positioned on the row to analyze.
 * @param max the actual column count of {@code rs}.
 * @throws SQLException when a column access fails.
 */
private void analyzeFunction(ResultSet rs,
                             int max) throws SQLException {
    // 1: function catalog (string, may be null)
    // 2: function schema (string, may be null)
    // 3: function name (string)
    // 4: comments (string)
    // 5: function type (short)
    // 6: specific name (string)
    final String schemaName = get(rs::getString, 2, max, null);
    if (acceptsSchema(schemaName)) {
        addCsv(rs);
        final String catalogName = get(rs::getString, 1, max, null);
        final String functionName = get(rs::getString, 3, max, null);
        final String comments = get(rs::getString, 4, max, null);
        final short resultTypeCode = get(rs::getShort, 5, max, (short) -1);
        final String specificName = get(rs::getString, 6, max, null);
        try {
            // getOrCreate: some drivers report catalogs/schemas here that getCatalogs()/getSchemas() did not.
            final RdbCatalog catalog = database.getOrCreateCatalog(catalogName);
            final RdbSchema schema = catalog.getOrCreateSchema(schemaName);
            final RdbFunction function = schema.createFunction(functionName);
            function.setComments(comments);
            function.setResultType(FunctionResultType.decode(resultTypeCode));
            function.setSpecificName(specificName);
        } catch (final Exception e) {
            LOGGER.error("Failed to create function", e);
        }
    }
}
/** Collects function parameters/columns of the accepted schemas. */
private void analyzeFunctionColumns() {
    final ResultSetSupplier supplier = () -> metadata.getFunctionColumns(null, getSchemaPattern(), null, null);
    analyzeResultSet("Function Columns", supplier, (rs, max) -> analyzeFunctionColumn(rs, max));
}
/**
 * Analyzes one row of {@code DatabaseMetaData.getFunctionColumns()} and
 * registers the column on its function (looked up by specific name).
 * Creation failures are logged.
 *
 * @param rs the result set positioned on the row to analyze.
 * @param max the actual column count of {@code rs}.
 * @throws SQLException when a column access fails.
 */
private void analyzeFunctionColumn(ResultSet rs,
                                   int max) throws SQLException {
    // 1: function catalog (string, may be null)
    // 2: function schema (string, may be null)
    // 3: function name (string)
    // 4: column name (string)
    // 5: column type (short)
    // 6: data type (int)
    // 7: type name (string)
    // 8: precision (int)
    // 9: length (int)
    // 10: scale (short)
    // 11: radix (short)
    // 12: nullable (short)
    // 13: comments (string)
    // 14: char octet length (int)
    // 15: ordinal (int)
    // 16: is nullable (string)
    // 17: function specific name (string)
    // PostgreSQL driver 42.2.1 returns wrong result set (20 columns instead of 17).
    // In addition, catalog is wrong.
    final int shift;
    if (rs.getMetaData().getColumnCount() == 20) {
        shift = 3;
    } else {
        shift = 0;
    }
    // FIX: use the guarded accessor, consistently with all other analyze methods
    // (the original called rs.getString(2) directly).
    final String schemaName = get(rs::getString, 2, max, null);
    if (acceptsSchema(schemaName)) {
        addCsv(rs);
        // Catalog is only trusted when the driver returned the documented layout.
        final String catalogName = shift == 0 ? get(rs::getString, 1, max, null) : null;
        // final String functionName = rs.getString(3);
        final String columnName = get(rs::getString, 4, max, null);
        final String comments = get(rs::getString, 13, max, null);
        final String functionSpecificName = get(rs::getString, 17 + shift, max, null);
        try {
            final RdbCatalog catalog = database.getCatalog(catalogName);
            final RdbSchema schema = catalog.getSchema(schemaName);
            final RdbFunction function = schema.getFunction(functionSpecificName);
            final RdbFunctionColumn column = function.createColumn(columnName);
            column.setComments(comments);
            // TODO
        } catch (final Exception e) {
            LOGGER.error("Failed to create column '" + columnName + "' for '" + functionSpecificName + "'", e);
        }
    }
}
/** Collects stored procedures of the accepted schemas. */
private void analyzeProcedures() {
    final ResultSetSupplier supplier = () -> metadata.getProcedures(null, getSchemaPattern(), null);
    analyzeResultSet("Procedures", supplier, (rs, max) -> analyzeProcedure(rs, max));
}
/**
 * Analyzes one row of {@code DatabaseMetaData.getProcedures()} and registers
 * the procedure on its schema. Creation failures are logged.
 *
 * @param rs the result set positioned on the row to analyze.
 * @param max the actual column count of {@code rs}.
 * @throws SQLException when a column access fails.
 */
private void analyzeProcedure(ResultSet rs,
                              int max) throws SQLException {
    // 1: procedure catalog (string, may be null)
    // 2: procedure schema (string, may be null)
    // 3: procedure name (string)
    // 4: reserved
    // 5: reserved
    // 6: reserved
    // 7: comments (string)
    // 8: procedure type (short)
    // 9: specific name (string)
    final String schemaName = get(rs::getString, 2, max, null);
    if (acceptsSchema(schemaName)) {
        addCsv(rs);
        final String catalogName = get(rs::getString, 1, max, null);
        final String procedureName = get(rs::getString, 3, max, null);
        final String comments = get(rs::getString, 7, max, null);
        final short resultTypeCode = get(rs::getShort, 8, max, (short) -1);
        final String specificName = get(rs::getString, 9, max, null);
        try {
            // getOrCreate: some drivers report catalogs/schemas here that getCatalogs()/getSchemas() did not.
            final RdbCatalog catalog = database.getOrCreateCatalog(catalogName);
            final RdbSchema schema = catalog.getOrCreateSchema(schemaName);
            final RdbProcedure procedure = schema.createProcedure(procedureName);
            procedure.setComments(comments);
            procedure.setSpecificName(specificName);
            procedure.setResultType(ProcedureResultType.decode(resultTypeCode));
        } catch (final Exception e) {
            LOGGER.error("Failed to create procedure", e);
        }
    }
}
/** Collects stored procedure parameters/columns of the accepted schemas. */
private void analyzeProcedureColumns() {
    final ResultSetSupplier supplier = () -> metadata.getProcedureColumns(null, getSchemaPattern(), null, null);
    analyzeResultSet("Procedure Columns", supplier, (rs, max) -> analyzeProcedureColumn(rs, max));
}
/**
 * Analyzes one row of {@code DatabaseMetaData.getProcedureColumns()} and
 * registers the column on its procedure (looked up by specific name).
 * Creation failures are logged.
 *
 * @param rs the result set positioned on the row to analyze.
 * @param max the actual column count of {@code rs}.
 * @throws SQLException when a column access fails.
 */
private void analyzeProcedureColumn(ResultSet rs,
                                    int max) throws SQLException {
    // 1: procedure catalog (string, may be null)
    // 2: procedure schema (string, may be null)
    // 3: procedure name (string)
    // 4: column name (string)
    // 5: column type (short)
    // 6: data type (int)
    // 7: type name (string)
    // 8: precision (int)
    // 9: length (int)
    // 10: scale (short)
    // 11: radix (short)
    // 12: nullable (short)
    // 13: comments (string)
    // 14: default value (string)
    // 15: sql data type (int, reserved)
    // 16: sql datetime sub (int, reserved)
    // 17: char octet length (int)
    // 18: ordinal (int)
    // 19: is nullable (string)
    // 20: procedure specific name (string)
    final String schemaName = get(rs::getString, 2, max, null);
    if (acceptsSchema(schemaName)) {
        addCsv(rs);
        final String catalogName = get(rs::getString, 1, max, null);
        // final String procedureName = rs.getString(3);
        final String columnName = get(rs::getString, 4, max, null);
        final String comments = get(rs::getString, 13, max, null);
        final String procedureSpecificName = get(rs::getString, 20, max, null);
        try {
            final RdbCatalog catalog = database.getCatalog(catalogName);
            final RdbSchema schema = catalog.getSchema(schemaName);
            final RdbProcedure procedure = schema.getProcedure(procedureSpecificName);
            final RdbProcedureColumn column = procedure.createColumn(columnName);
            column.setComments(comments);
            // TODO
        } catch (final Exception e) {
            LOGGER.error("Failed to create column '" + columnName + "' for '" + procedureSpecificName + "'", e);
        }
    }
}
/** Collects the table types (TABLE, VIEW, ...) supported by the database. */
private void analyzeTableTypes() {
    analyzeResultSet("Table Types",
                     () -> metadata.getTableTypes(),
                     (rs, max) -> analyzeTableType(rs, max));
}
/**
 * Analyzes one row of {@code DatabaseMetaData.getTableTypes()} and registers
 * the table type on the database model. Creation failures are logged.
 */
private void analyzeTableType(ResultSet rs,
                              int max) throws SQLException {
    // 1: table type (string)
    addCsv(rs);
    final String typeName = get(rs::getString, 1, max, null);
    try {
        database.createTableType(typeName);
    } catch (final Exception e) {
        LOGGER.error("Failed to create table type", e);
    }
}
/** Collects the catalogs of the database. */
private void analyzeCatalogs() {
    analyzeResultSet("Catalogs",
                     () -> metadata.getCatalogs(),
                     (rs, max) -> analyzeCatalog(rs, max));
}
/**
 * Analyzes one row of {@code DatabaseMetaData.getCatalogs()} and registers the
 * catalog on the database model. Creation failures are logged.
 */
private void analyzeCatalog(ResultSet rs,
                            int max) throws SQLException {
    // 1: catalog name (string)
    addCsv(rs);
    final String name = get(rs::getString, 1, max, null);
    try {
        database.createCatalog(name);
    } catch (final Exception e) {
        LOGGER.error("Failed to create catalog", e);
    }
}
/** Collects the schemas of the database. */
private void analyzeSchemas() {
    analyzeResultSet("Schemas",
                     () -> metadata.getSchemas(),
                     (rs, max) -> analyzeSchema(rs, max));
}
/**
 * Analyzes one row of {@code DatabaseMetaData.getSchemas()} and registers the
 * schema on its (possibly created) catalog. Creation failures are logged.
 *
 * @param rs the result set positioned on the row to analyze.
 * @param max the actual column count of {@code rs}.
 * @throws SQLException when a column access fails.
 */
private void analyzeSchema(ResultSet rs,
                           int max) throws SQLException {
    // 1: table schema (string)
    // 2: table catalog (string, may be null)
    final String schemaName = get(rs::getString, 1, max, null);
    if (acceptsSchema(schemaName)) {
        addCsv(rs);
        final String catalogName = get(rs::getString, 2, max, null); // May be null
        try {
            final RdbCatalog catalog = database.getOrCreateCatalog(catalogName);
            catalog.createSchema(schemaName);
        } catch (final Exception e) {
            // FIX: message previously said "Failed to create catalog" although the
            // failing operation is the schema creation.
            LOGGER.error("Failed to create schema '" + schemaName + "'", e);
        }
    }
}
/** Collects the tables of the accepted schemas. */
private void analyzeTables() {
    final ResultSetSupplier supplier = () -> metadata.getTables(null, getSchemaPattern(), null, null);
    analyzeResultSet("Tables", supplier, (rs, max) -> analyzeTable(rs, max));
}
/**
 * Analyzes one row of {@code DatabaseMetaData.getTables()} and registers the
 * table on its schema. Creation failures are logged.
 *
 * @param rs the result set positioned on the row to analyze.
 * @param max the actual column count of {@code rs}.
 * @throws SQLException when a column access fails.
 */
private void analyzeTable(ResultSet rs,
                          int max) throws SQLException {
    // 1: table catalog (string, may be null)
    // 2: table schema (string, may be null)
    // 3: table name (string)
    // 4: table type (string)
    // 5: comments (string)
    // 6: type catalog
    // 7: type schema
    // 8: type name
    // 9: self referencing column name (string)
    // 10: ref generation (string)
    final String schemaName = get(rs::getString, 2, max, null); // May be null
    if (acceptsSchema(schemaName)) {
        addCsv(rs);
        final String catalogName = get(rs::getString, 1, max, null); // May be null
        final String tableName = get(rs::getString, 3, max, null);
        final String tableType = get(rs::getString, 4, max, null);
        final String comments = get(rs::getString, 5, max, null);
        try {
            // getOrCreate: some drivers report catalogs/schemas here that getCatalogs()/getSchemas() did not.
            final RdbCatalog catalog = database.getOrCreateCatalog(catalogName);
            final RdbSchema schema = catalog.getOrCreateSchema(schemaName);
            final RdbTable table = schema.createTable(tableName);
            table.setTableTypeName(tableType);
            table.setComments(comments);
            // TODO Add other table attributes
        } catch (final Exception e) {
            LOGGER.error("Failed to create table '" + tableName + "'", e);
        }
    }
}
/** Collects the table columns of the accepted schemas. */
private void analyzeColumns() {
    final ResultSetSupplier supplier = () -> metadata.getColumns(null, getSchemaPattern(), null, null);
    analyzeResultSet("Columns", supplier, (rs, max) -> analyzeColumn(rs, max));
}
/**
 * Analyzes one row of {@code DatabaseMetaData.getColumns()} and registers the
 * column on its table. Creation failures are logged.
 *
 * @param rs the result set positioned on the row to analyze.
 * @param max the actual column count of {@code rs}.
 * @throws SQLException when a column access fails.
 */
private void analyzeColumn(ResultSet rs,
                           int max) throws SQLException {
    // 1: table catalog (string, may be null)
    // 2: table schema (string, may be null)
    // 3: table name (string)
    // 4: column name (string)
    // 5: data type (int, SQL type from java.sql.Types)
    // 6: type name (string)
    // 7: column size (int)
    // 8: buffer length (not used)
    // 9: decimal digits (int)
    // 10: radix (int)
    // 11: nullable (int) --> 18
    // 12: comments (string)
    // 13: default value (string)
    // 14: sql data type (int, unused)
    // 15: sql datetime sub (unused)
    // 16: char octet length (int)
    // 17: column index (int)
    // 18: is nullable (string) --> 11
    // 19: scope catalog (string)
    // 20: scope schema (string)
    // 21: scope table (string)
    // 22: source data type (short)
    // 23: is auto increment (string)
    // 24: is generated (string)
    final String schemaName = get(rs::getString, 2, max, null); // May be null
    if (acceptsSchema(schemaName)) {
        addCsv(rs);
        final String catalogName = get(rs::getString, 1, max, null); // May be null
        final String tableName = get(rs::getString, 3, max, null);
        final String columnName = get(rs::getString, 4, max, null);
        final int dataTypeCode = get(rs::getInt, 5, max, -1);
        final String typeName = get(rs::getString, 6, max, null);
        final int size = get(rs::getInt, 7, max, -1);
        final int digits = get(rs::getInt, 9, max, -1);
        final int radix = get(rs::getInt, 10, max, -1);
        final String comments = get(rs::getString, 12, max, null);
        final String defaultValue = get(rs::getString, 13, max, null);
        final int maxCharBytes = get(rs::getInt, 16, max, -1);
        final int columnOrdinal = get(rs::getInt, 17, max, -1);
        final String nullable = get(rs::getString, 18, max, null);
        final String scopeCatalogName = get(rs::getString, 19, max, null); // May be null
        final String scopeSchemaName = get(rs::getString, 20, max, null); // May be null
        final String scopeTableName = get(rs::getString, 21, max, null); // May be null
        final short sourceDataTypeCode = get(rs::getShort, 22, max, (short) -1);
        final String autoIncrement = get(rs::getString, 23, max, null);
        final String generated = get(rs::getString, 24, max, null);
        try {
            // FIX: the getOptional* lookups were performed outside this try block; a null
            // result then raised an uncaught NullPointerException that aborted the whole
            // "Columns" step, instead of being logged per row like in the sibling methods.
            final RdbCatalog catalog = database.getOptionalCatalog(catalogName);
            final RdbSchema schema = catalog.getOptionalSchema(schemaName);
            final RdbTable table = schema.getOptionalTable(tableName);
            final RdbTableColumn column = table.createColumn(columnName);
            column.setComments(comments);
            column.setDataType(SqlDataType.decode(dataTypeCode));
            column.setTypeName(typeName);
            column.setSize(size);
            column.setDigits(digits);
            column.setRadix(radix);
            column.setNullable(YesNoUnknown.decode(nullable));
            column.setDefaultValue(defaultValue);
            column.setOrdinal(columnOrdinal);
            column.setAutoIncrement(YesNoUnknown.decode(autoIncrement));
            column.setGenerated(YesNoUnknown.decode(generated));
            // TODO Add other column attributes
        } catch (final Exception e) {
            LOGGER.error("Failed to create table column '" + columnName + "'", e);
        }
    }
}
/** Collects the primary keys of every accepted table. */
private void analyzePrimaryKeys() {
    analyzeTableResultSet("Primary Keys",
                          (catalog, schema, table) -> metadata.getPrimaryKeys(catalog, schema, table),
                          (rs, max, table) -> analyzePrimaryKey(rs, max, table));
}
/**
 * Analyzes one row of {@code DatabaseMetaData.getPrimaryKeys()} and registers
 * the key column on the table's primary key. Creation failures are logged.
 *
 * @param rs the result set positioned on the row to analyze.
 * @param max the actual column count of {@code rs}.
 * @param table the table owning the primary key.
 * @throws SQLException when a column access fails.
 */
private void analyzePrimaryKey(ResultSet rs,
                               int max,
                               RdbTable table) throws SQLException {
    // 1: table catalog (string, may be null)
    // 2: table schema (string, may be null)
    // 3: table name (string)
    // 4: column name (string)
    // 5: key sequence (short)
    // 6: key name (string, may be null)
    addCsv(rs);
    final String columnName = get(rs::getString, 4, max, null);
    final short keyOrdinal = get(rs::getShort, 5, max, (short) -1);
    final String keyName = get(rs::getString, 6, max, null);
    try {
        // One key per table; rows are its columns, ordered by keyOrdinal.
        final RdbPrimaryKey key = table.getOrCreatePrimaryKey(keyName);
        final RdbPrimaryKeyColumn keyColumn = key.createColumn(columnName);
        keyColumn.setOrdinal(keyOrdinal);
    } catch (final Exception e) {
        LOGGER.error("Failed to create primary key '" + keyName + "'", e);
    }
}
/** Collects the foreign keys (imported keys) of every accepted table. */
private void analyzeForeignKeys() {
    analyzeTableResultSet("Foreign Keys",
                          (catalog, schema, table) -> metadata.getImportedKeys(catalog, schema, table),
                          (rs, max, table) -> analyzeForeignKey(rs, max, table));
}
/**
 * Analyzes one row of {@code DatabaseMetaData.getImportedKeys()} and registers
 * the foreign key column on the table. Creation failures are logged.
 *
 * @param rs the result set positioned on the row to analyze.
 * @param max the actual column count of {@code rs}.
 * @param table the table owning the foreign key.
 * @throws SQLException when a column access fails.
 */
private void analyzeForeignKey(ResultSet rs,
                               int max,
                               RdbTable table) throws SQLException {
    // 1: PK table catalog name being imported.
    // 2: PK table schema name being imported.
    // 3: PK table name being imported.
    // 4: PK column name being imported.
    // 5: FK table catalog name
    // 6: FK table schema name
    // 7: FK table name
    // 8: FK column name
    // 9: key sequence (short)
    // 10: update rule (short)
    // 11: delete rule (short)
    // 12: FK name (string)
    // 13: PK name (string)
    // 14: deferrability (short)
    addCsv(rs);
    final String pkCatalogName = get(rs::getString, 1, max, null);
    final String pkSchemaName = get(rs::getString, 2, max, null);
    final String pkTableName = get(rs::getString, 3, max, null);
    final String pkColumnName = get(rs::getString, 4, max, null);
    final String fkColumnName = get(rs::getString, 8, max, null);
    final short fkOrdinal = get(rs::getShort, 9, max, (short) -1);
    final String fkName = get(rs::getString, 12, max, null);
    // FIX: wrap model updates in try/catch, consistently with analyzePrimaryKey:
    // a runtime failure here previously aborted the whole "Foreign Keys" step.
    try {
        final RdbForeignKey fk = table.getOrCreateForeignKey(fkName);
        fk.setRefCatalogName(pkCatalogName);
        fk.setRefSchemaName(pkSchemaName);
        fk.setRefTableName(pkTableName);
        final RdbForeignKeyColumn fkColumn = fk.createColumn(fkColumnName);
        fkColumn.setOrdinal(fkOrdinal);
        fkColumn.setRefColumnName(pkColumnName);
        // TODO Add other fk attributes
    } catch (final Exception e) {
        LOGGER.error("Failed to create foreign key '" + fkName + "'", e);
    }
}
/** Collects the indices (including non-unique, without approximation) of every accepted table. */
private void analyzeIndices() {
    analyzeTableResultSet("Indices",
                          (catalog, schema, table) -> metadata.getIndexInfo(catalog, schema, table, false, false),
                          this::analyzeIndex);
}
/**
 * Analyzes one row of {@code DatabaseMetaData.getIndexInfo()} and registers the
 * index column on the table. Creation failures are logged.
 * Note: rows of type tableIndexStatistic carry null index/column names.
 *
 * @param rs the result set positioned on the row to analyze.
 * @param max the actual column count of {@code rs}.
 * @param table the table owning the index.
 * @throws SQLException when a column access fails.
 */
private void analyzeIndex(ResultSet rs,
                          int max,
                          RdbTable table) throws SQLException {
    // 1: table catalog (string, may be null)
    // 2: table schema (string, may be null)
    // 3: table name (string)
    // 4: non unique (boolean)
    // 5: index qualifier (string)
    // 6: index name (string)
    // 7: type (short)
    // 8: ordinal position (short)
    // 9: column name (string)
    // 10: asc or desc (string)
    // 11: cardinality (long)
    // 12: pages (long)
    // 13: filter condition (string)
    addCsv(rs);
    final String indexName = get(rs::getString, 6, max, null);
    final short indexTypeCode = get(rs::getShort, 7, max, (short) -1);
    final short ordinal = get(rs::getShort, 8, max, (short) -1);
    final String columnName = get(rs::getString, 9, max, null);
    // FIX: wrap model updates in try/catch, consistently with analyzePrimaryKey:
    // a runtime failure here previously aborted the whole "Indices" step.
    try {
        final RdbIndex index = table.getOrCreateIndex(indexName);
        index.setType(RdbIndexType.decode(indexTypeCode));
        final RdbIndexColumn column = index.createColumn(columnName);
        column.setOrdinal(ordinal);
        // TODO Add other index attributes
    } catch (final Exception e) {
        LOGGER.error("Failed to create index '" + indexName + "'", e);
    }
}
/**
 * Command line entry point: delegates parsing and execution to {@link MainSupport}.
 *
 * @param args the command line arguments.
 */
public static void main(String[] args) {
    new MainSupport().main(args);
}
/**
 * Command line support: declares options, converts them to {@link MainArgs}
 * and runs the analyzer.
 * Restored the type arguments lost in extraction: {@code analyze} returns
 * {@code MainArgs} and {@code execute} returns {@code Void}, matching
 * {@code AbstractMainSupport<MainArgs, Void>}.
 */
private static class MainSupport extends AbstractMainSupport<MainArgs, Void> {
    private static final String SCHEMA = "schema";
    private static final String XML_OUTPUT = "xml";
    private static final String CSV_OUTPUT = "csv";

    public MainSupport() {
        super(RdbMetaAnalyzer.class, LOGGER);
    }

    @Override
    protected String getVersion() {
        return cdc.util.Config.VERSION;
    }

    @Override
    protected void addSpecificOptions(Options options) {
        options.addOption(Option.builder()
                                .longOpt(URL)
                                .desc("URL to access database.")
                                .hasArg()
                                .required()
                                .build());
        options.addOption(Option.builder()
                                .longOpt(DRIVER)
                                .desc("Optional JDBC Driver class.")
                                .hasArg()
                                .build());
        options.addOption(Option.builder()
                                .longOpt(USER)
                                .desc("Optional user name.")
                                .hasArg()
                                .build());
        options.addOption(Option.builder("pwd")
                                .longOpt(PASSWORD)
                                .desc("Optional user password.")
                                .hasArg()
                                .build());
        options.addOption(Option.builder()
                                .longOpt(SCHEMA)
                                .desc("Optional set of schemas that must be analyzed. When empty, all schemas are analyzed.")
                                .hasArgs()
                                .build());
        options.addOption(Option.builder()
                                .longOpt(XML_OUTPUT)
                                .desc("Optional XML output filename.")
                                .hasArg()
                                .build());
        options.addOption(Option.builder()
                                .longOpt(CSV_OUTPUT)
                                .desc("Optional CSV output dirname.")
                                .hasArg()
                                .build());
        options.addOption(Option.builder()
                                .longOpt(PREFIX)
                                .desc("Optional prefix for file names.")
                                .hasArg()
                                .build());
        addNoArgOptions(options, MainArgs.Feature.class);
    }

    @Override
    protected MainArgs analyze(CommandLine cl) throws ParseException {
        final MainArgs margs = new MainArgs();
        margs.url = cl.getOptionValue(URL);
        margs.user = cl.getOptionValue(USER);
        margs.password = cl.getOptionValue(PASSWORD);
        margs.driver = cl.getOptionValue(DRIVER);
        if (cl.hasOption(SCHEMA)) {
            for (final String s : cl.getOptionValues(SCHEMA)) {
                margs.schemas.add(s);
            }
        }
        margs.xmlOutputFile = getValueAsFile(cl, XML_OUTPUT, null);
        margs.csvOutputDir = getValueAsFile(cl, CSV_OUTPUT, null);
        margs.prefix = cl.getOptionValue(PREFIX);
        setMask(cl, MainArgs.Feature.class, margs.features::setEnabled);
        return margs;
    }

    @Override
    protected Void execute(MainArgs margs) throws Exception {
        RdbMetaAnalyzer.execute(margs);
        return null;
    }
}
}
// © 2015 - 2025 Weber Informatics LLC | Privacy Policy (repository page footer, not part of the source)