org.hibernate.metamodel.relational.Table
/*
 * Hibernate, Relational Persistence for Idiomatic Java
 *
 * Copyright (c) 2010 by Red Hat Inc and/or its affiliates or by
 * third-party contributors as indicated by either @author tags or express
 * copyright attribution statements applied by the authors.  All
 * third-party contributions are distributed under license by Red Hat Inc.
 *
 * This copyrighted material is made available to anyone wishing to use, modify,
 * copy, or redistribute it subject to the terms and conditions of the GNU
 * Lesser General Public License, as published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
 * or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License
 * for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with this distribution; if not, write to:
 * Free Software Foundation, Inc.
 * 51 Franklin Street, Fifth Floor
 * Boston, MA  02110-1301  USA
 */
package org.hibernate.metamodel.relational;

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;

import org.hibernate.dialect.Dialect;

/**
 * Models the concept of a relational TABLE (or VIEW).
 *
 * @author Gavin King
 * @author Steve Ebersole
 */
public class Table extends AbstractTableSpecification implements Exportable {
	private final Schema database;
	private final Identifier tableName;
	private final ObjectName objectName;
	private final String qualifiedName;

	private final LinkedHashMap<String, Index> indexes = new LinkedHashMap<String, Index>();
	private final LinkedHashMap<String, UniqueKey> uniqueKeys = new LinkedHashMap<String, UniqueKey>();
	private final List<CheckConstraint> checkConstraints = new ArrayList<CheckConstraint>();
	private final List<String> comments = new ArrayList<String>();

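	/**
	 * Create a table within the given schema; the qualified name is built from the schema's
	 * catalog/schema identifiers plus the table name.
	 *
	 * @param database The schema that owns this table
	 * @param tableName The (unqualified) table name
	 */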
	public Table(Schema database, String tableName) {
		this( database, Identifier.toIdentifier( tableName ) );
	}

	public Table(Schema database, Identifier tableName) {
		this.database = database;
		this.tableName = tableName;
		objectName = new ObjectName( database.getName().getSchema(), database.getName().getCatalog(), tableName );
		this.qualifiedName = objectName.toText();
	}

	@Override
	public Schema getSchema() {
		return database;
	}

	public Identifier getTableName() {
		return tableName;
	}

	@Override
	public String getLoggableValueQualifier() {
		return qualifiedName;
	}

	@Override
	public String getExportIdentifier() {
		return qualifiedName;
	}

	@Override
	public String toLoggableString() {
		return qualifiedName;
	}

	@Override
	public Iterable<Index> getIndexes() {
		return indexes.values();
	}

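	/**
	 * Locate the index with the given name, creating and registering it if it does not yet exist.
	 *
	 * @param name The index name
	 * @return The existing or newly created index
	 */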
	public Index getOrCreateIndex(String name) {
		if ( indexes.containsKey( name ) ) {
			return indexes.get( name );
		}
		Index index = new Index( this, name );
		indexes.put( name, index );
		return index;
	}

	@Override
	public Iterable<UniqueKey> getUniqueKeys() {
		return uniqueKeys.values();
	}

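	/**
	 * Locate the unique key with the given name, creating and registering it if it does not yet exist.
	 *
	 * @param name The unique key name
	 * @return The existing or newly created unique key
	 */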
	public UniqueKey getOrCreateUniqueKey(String name) {
		if ( uniqueKeys.containsKey( name ) ) {
			return uniqueKeys.get( name );
		}
		UniqueKey uniqueKey = new UniqueKey( this, name );
		uniqueKeys.put( name, uniqueKey );
		return uniqueKey;
	}

	@Override
	public Iterable<CheckConstraint> getCheckConstraints() {
		return checkConstraints;
	}

	@Override
	public void addCheckConstraint(String checkCondition) {
		// TODO: reject an empty checkCondition (StringHelper.isEmpty)?
		// TODO: generate a default constraint name instead of ""?
		checkConstraints.add( new CheckConstraint( this, "", checkCondition ) );
	}

	@Override
	public Iterable<String> getComments() {
		return comments;
	}

	@Override
	public void addComment(String comment) {
		comments.add( comment );
	}

	@Override
	public String getQualifiedName(Dialect dialect) {
		return objectName.toText( dialect );
	}

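	/**
	 * Generate the DDL needed to export this table: the CREATE TABLE statement itself,
	 * followed by one table-comment statement per registered comment.
	 *
	 * @param dialect The dialect against which to render the DDL
	 * @return The create commands
	 */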
	public String[] sqlCreateStrings(Dialect dialect) {
		boolean hasPrimaryKey = getPrimaryKey().getColumns().iterator().hasNext();
		StringBuilder buf =
				new StringBuilder(
						hasPrimaryKey ? dialect.getCreateTableString() : dialect.getCreateMultisetTableString() )
				.append( ' ' )
				.append( objectName.toText( dialect ) )
				.append( " (" );


		// TODO: fix this when identity columns are supported by new metadata (HHH-6436)
		// for now, assume false
		//boolean identityColumn = idValue != null && idValue.isIdentityColumn( metadata.getIdentifierGeneratorFactory(), dialect );
		boolean isPrimaryKeyIdentity = false;

		// Determine the primary key column name so it can be created as an identity column when the IdentityGenerator is used
		String pkColName = null;
		if ( hasPrimaryKey && isPrimaryKeyIdentity ) {
			Column pkColumn = getPrimaryKey().getColumns().iterator().next();
			pkColName = pkColumn.getColumnName().encloseInQuotesIfQuoted( dialect );
		}

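		// append each column definition: name, type (or identity fragment), default value,
		// nullability, column-level unique and check constraints, and any column comment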
		boolean isFirst = true;
		for ( SimpleValue simpleValue : values() ) {
			if ( ! Column.class.isInstance( simpleValue ) ) {
				continue;
			}
			if ( isFirst ) {
				isFirst = false;
			}
			else {
				buf.append( ", " );
			}
			Column col = ( Column ) simpleValue;
			String colName = col.getColumnName().encloseInQuotesIfQuoted( dialect );

			buf.append( colName ).append( ' ' );

			if ( isPrimaryKeyIdentity && colName.equals( pkColName ) ) {
				// to support dialects that have their own identity data type
				if ( dialect.hasDataTypeInIdentityColumn() ) {
					buf.append( getTypeString( col, dialect ) );
				}
				buf.append( ' ' )
						.append( dialect.getIdentityColumnString( col.getDatatype().getTypeCode() ) );
			}
			else {
				buf.append( getTypeString( col, dialect ) );

				String defaultValue = col.getDefaultValue();
				if ( defaultValue != null ) {
					buf.append( " default " ).append( defaultValue );
				}

				if ( col.isNullable() ) {
					buf.append( dialect.getNullColumnString() );
				}
				else {
					buf.append( " not null" );
				}

			}

			if ( col.isUnique() ) {
				UniqueKey uk = getOrCreateUniqueKey( col.getColumnName()
						.encloseInQuotesIfQuoted( dialect ) + '_' );
				uk.addColumn( col );
				buf.append( dialect.getUniqueDelegate()
						.getColumnDefinitionUniquenessFragment( col ) );
			}

			if ( col.getCheckCondition() != null && dialect.supportsColumnCheck() ) {
				buf.append( " check (" )
						.append( col.getCheckCondition() )
						.append( ")" );
			}

			String columnComment = col.getComment();
			if ( columnComment != null ) {
				buf.append( dialect.getColumnComment( columnComment ) );
			}
		}
		if ( hasPrimaryKey ) {
			buf.append( ", " )
					.append( getPrimaryKey().sqlConstraintStringInCreateTable( dialect ) );
		}

		buf.append( dialect.getUniqueDelegate().getTableCreationUniqueConstraintsFragment( this ) );

		if ( dialect.supportsTableCheck() ) {
			for ( CheckConstraint checkConstraint : checkConstraints ) {
				buf.append( ", check (" )
						.append( checkConstraint )
						.append( ')' );
			}
		}

		buf.append( ')' );
		buf.append( dialect.getTableTypeString() );

		String[] sqlStrings = new String[ comments.size() + 1 ];
		sqlStrings[ 0 ] = buf.toString();

		for ( int i = 0 ; i < comments.size(); i++ ) {
			sqlStrings[ i + 1 ] = dialect.getTableComment( comments.get( i ) );
		}

		return sqlStrings;
	}

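	/**
	 * Resolve the SQL type fragment for a column: use the explicitly specified SQL type if present,
	 * otherwise look up the dialect's type name for the column's JDBC type code and size.
	 */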
	private static String getTypeString(Column col, Dialect dialect) {
		if ( col.getSqlType() != null ) {
			return col.getSqlType();
		}
		Size size = col.getSize() == null ? new Size() : col.getSize();
		return dialect.getTypeName(
				col.getDatatype().getTypeCode(),
				size.getLength(),
				size.getPrecision(),
				size.getScale()
		);
	}

	@Override
	public String[] sqlDropStrings(Dialect dialect) {
		return new String[] { dialect.getDropTableString( getQualifiedName( dialect ) ) };
	}

	@Override
	public String toString() {
		return "Table{name=" + qualifiedName + '}';
	}
}
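For context, a minimal usage sketch (assuming a Schema and a Dialect are obtained elsewhere; column population via AbstractTableSpecification is omitted, and the table name, comment, and check condition below are illustrative, not taken from Hibernate itself):

package org.hibernate.metamodel.relational;

import org.hibernate.dialect.Dialect;

public class TableDdlSketch {
	public static void printDdl(Schema schema, Dialect dialect) {
		Table orders = new Table( schema, "ORDERS" );
		orders.addComment( "customer orders" );       // emitted as a separate table-comment statement
		orders.addCheckConstraint( "TOTAL >= 0" );    // rendered only if the dialect supports table checks

		for ( String ddl : orders.sqlCreateStrings( dialect ) ) {
			System.out.println( ddl );                // CREATE TABLE ... followed by comment statements
		}
		for ( String ddl : orders.sqlDropStrings( dialect ) ) {
			System.out.println( ddl );                // DROP TABLE ...
		}
	}
}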