/*
 * Hibernate OGM, Domain model persistence for NoSQL datastores
 *
 * License: GNU Lesser General Public License (LGPL), version 2.1 or later
 * See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
 */
package org.hibernate.ogm.datastore.cassandra;

import static com.datastax.driver.core.querybuilder.QueryBuilder.eq;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ExecutionException;

import org.hibernate.HibernateException;
import org.hibernate.LockMode;
import org.hibernate.dialect.lock.LockingStrategy;
import org.hibernate.mapping.Column;
import org.hibernate.mapping.Table;
import org.hibernate.ogm.datastore.cassandra.impl.CassandraDatastoreProvider;
import org.hibernate.ogm.datastore.cassandra.impl.CassandraTypeMapper;
import org.hibernate.ogm.datastore.cassandra.logging.impl.Log;
import org.hibernate.ogm.datastore.cassandra.logging.impl.LoggerFactory;
import org.hibernate.ogm.datastore.cassandra.model.impl.ResultSetTupleIterator;
import org.hibernate.ogm.datastore.cassandra.query.impl.CassandraParameterMetadataBuilder;
import org.hibernate.ogm.datastore.map.impl.MapAssociationSnapshot;
import org.hibernate.ogm.datastore.map.impl.MapHelpers;
import org.hibernate.ogm.datastore.map.impl.MapTupleSnapshot;
import org.hibernate.ogm.dialect.query.spi.BackendQuery;
import org.hibernate.ogm.dialect.query.spi.ClosableIterator;
import org.hibernate.ogm.dialect.query.spi.ParameterMetadataBuilder;
import org.hibernate.ogm.dialect.query.spi.QueryParameters;
import org.hibernate.ogm.dialect.query.spi.QueryableGridDialect;
import org.hibernate.ogm.dialect.query.spi.TypedGridValue;
import org.hibernate.ogm.dialect.spi.AssociationContext;
import org.hibernate.ogm.dialect.spi.AssociationTypeContext;
import org.hibernate.ogm.dialect.spi.BaseGridDialect;
import org.hibernate.ogm.dialect.spi.GridDialect;
import org.hibernate.ogm.dialect.spi.ModelConsumer;
import org.hibernate.ogm.dialect.spi.NextValueRequest;
import org.hibernate.ogm.dialect.spi.OperationContext;
import org.hibernate.ogm.dialect.spi.TransactionContext;
import org.hibernate.ogm.dialect.spi.TupleAlreadyExistsException;
import org.hibernate.ogm.dialect.spi.TupleContext;
import org.hibernate.ogm.dialect.spi.TuplesSupplier;
import org.hibernate.ogm.dialect.spi.TupleTypeContext;
import org.hibernate.ogm.entityentry.impl.TuplePointer;
import org.hibernate.ogm.model.key.spi.AssociationKey;
import org.hibernate.ogm.model.key.spi.AssociationKeyMetadata;
import org.hibernate.ogm.model.key.spi.EntityKey;
import org.hibernate.ogm.model.key.spi.EntityKeyMetadata;
import org.hibernate.ogm.model.key.spi.RowKey;
import org.hibernate.ogm.model.spi.Association;
import org.hibernate.ogm.model.spi.AssociationOperation;
import org.hibernate.ogm.model.spi.Tuple;
import org.hibernate.ogm.model.spi.Tuple.SnapshotType;
import org.hibernate.ogm.model.spi.TupleOperation;
import org.hibernate.ogm.type.spi.GridType;
import org.hibernate.persister.entity.Lockable;
import org.hibernate.type.Type;

import com.datastax.driver.core.BoundStatement;
import com.datastax.driver.core.ColumnDefinitions;
import com.datastax.driver.core.PreparedStatement;
import com.datastax.driver.core.RegularStatement;
import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.Row;
import com.datastax.driver.core.Session;
import com.datastax.driver.core.SimpleStatement;
import com.datastax.driver.core.exceptions.DriverException;
import com.datastax.driver.core.querybuilder.Delete;
import com.datastax.driver.core.querybuilder.Insert;
import com.datastax.driver.core.querybuilder.QueryBuilder;
import com.datastax.driver.core.querybuilder.Select;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;

/**
 * Dialect implementation using CQL3 over Cassandra's native transport via java-driver.
 *
 * @author Jonathan Halliday
 */
public class CassandraDialect extends BaseGridDialect implements GridDialect, QueryableGridDialect<String> {

	private static final Log log = LoggerFactory.getLogger();

	private final CassandraDatastoreProvider provider;
	private final Session session;
	private final LoadingCache<String, PreparedStatement> preparedStatementCache;

	public CassandraDialect(CassandraDatastoreProvider provider) {
		this.provider = provider;
		session = provider.getSession();

		// statements are keyed by their CQL text, so each distinct statement shape is prepared only once
		preparedStatementCache = CacheBuilder.newBuilder()
				.maximumSize( 100000 )
				.build(
						new CacheLoader<String, PreparedStatement>() {
							@Override
							public PreparedStatement load(String query) throws Exception {
								return session.prepare( query );
							}
						}
				);
	}

	@Override
	public LockingStrategy getLockingStrategy(Lockable lockable, LockMode lockMode) {
		// Cassandra essentially has no workable lock strategy unless you use external tools like
		// ZooKeeper or fancy CQL overlays like waitchain.
		return null;
	}

	private ResultSet bindAndExecute(Object[] columnValues, RegularStatement statement) {

		PreparedStatement preparedStatement;
		try {
			preparedStatement = preparedStatementCache.get( statement.getQueryString() );
		}
		catch (ExecutionException e) {
			throw log.failToPrepareCQL( statement.getQueryString(), e.getCause() );
		}

		try {
			BoundStatement boundStatement = new BoundStatement( preparedStatement );
			boundStatement.bind( columnValues );
			return session.execute( boundStatement );
		}
		catch (DriverException e) {
			throw log.failToExecuteCQL( statement.getQueryString(), e );
		}
	}
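
	// Illustrative use, with hypothetical table/column names:
	//
	//   Select select = QueryBuilder.select().all().from( quote( "User" ) );
	//   select.where( eq( quote( "id" ), QueryBuilder.bindMarker() ) );
	//   ResultSet rs = bindAndExecute( new Object[] { userId }, select );
	//
	// The statement text is prepared at most once per session (via the cache above); only the
	// bound values travel on each execution.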

	// temporary, as the equivalent in java-driver's QueryBuilder is broken.
	// https://datastax-oss.atlassian.net/browse/JAVA-712
	private static String quote(String columnName) {
		StringBuilder sb = new StringBuilder();
		sb.append( '"' );
		sb.append( columnName );
		sb.append( '"' );
		return sb.toString();
	}
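
	// e.g. quote( "userId" ) returns "userId" wrapped in double quotes; quoting is needed because
	// unquoted CQL identifiers are folded to lower case, while mapped names may be mixed-case.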

	@Override
	public Tuple getTuple(EntityKey key, OperationContext operationContext) {

		Select select = QueryBuilder.select().all().from( quote( key.getTable() ) );
		Select.Where selectWhere = select.where( eq( quote( key.getColumnNames()[0] ), QueryBuilder.bindMarker() ) );
		for ( int i = 1; i < key.getColumnNames().length; i++ ) {
			selectWhere = selectWhere.and( eq( quote( key.getColumnNames()[i] ), QueryBuilder.bindMarker() ) );
		}
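		// the generated statement has the form SELECT * FROM "<table>" WHERE "<pk1>"=? AND "<pk2>"=? ...,
		// with one bind marker per key column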

		Object[] columnValues = key.getColumnValues();
		ResultSet resultSet = bindAndExecute( columnValues, select );

		if ( resultSet.isExhausted() ) {
			return null;
		}

		Row row = resultSet.one();
		Tuple tuple = createTuple( row );
		return tuple;
	}

	@Override
	public Tuple createTuple(EntityKey key, OperationContext operationContext) {
		Map<String, Object> toSave = new HashMap<>();
		toSave.put( key.getColumnNames()[0], key.getColumnValues()[0] );
		return new Tuple( new MapTupleSnapshot( toSave ), SnapshotType.INSERT );
	}

	@Override
	public void insertOrUpdateTuple(EntityKey key, TuplePointer tuplePointer, TupleContext tupleContext)
			throws TupleAlreadyExistsException {
		Tuple tuple = tuplePointer.getTuple();

		List<TupleOperation> updateOps = new ArrayList<>( tuple.getOperations().size() );
		List<TupleOperation> deleteOps = new ArrayList<>( tuple.getOperations().size() );

		for ( TupleOperation op : tuple.getOperations() ) {
			switch ( op.getType() ) {
				case PUT:
					updateOps.add( op );
					break;
				case REMOVE:
				case PUT_NULL:
					deleteOps.add( op );
					break;
				default:
					throw new HibernateException( "TupleOperation not supported: " + op.getType() );
			}
		}
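		// note: PUT_NULL is grouped with REMOVE because in Cassandra writing null to a column and
		// deleting that column are equivalent (both produce a tombstone)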

		if ( deleteOps.size() > 0 ) {

			Delete.Selection deleteSelection = QueryBuilder.delete();
			for ( TupleOperation tupleOperation : deleteOps ) {
				deleteSelection.column( quote( tupleOperation.getColumn() ) );
			}
			Delete delete = deleteSelection.from( quote( key.getTable() ) );
			Delete.Where deleteWhere = delete.where(
					eq(
							quote( key.getColumnNames()[0] ),
							QueryBuilder.bindMarker()
					)
			);
			for ( int i = 1; i < key.getColumnNames().length; i++ ) {
				deleteWhere = deleteWhere.and( eq( quote( key.getColumnNames()[i] ), QueryBuilder.bindMarker() ) );
			}

			bindAndExecute( key.getColumnValues(), delete );
		}

		if ( updateOps.size() > 0 ) {

			// insert and update are both 'upsert' in cassandra.
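			// A plain INSERT (no IF NOT EXISTS) overwrites any existing row with the same primary
			// key, so one statement shape serves both cases.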
			Insert insert = QueryBuilder.insertInto( quote( key.getTable() ) );
			List<Object> columnValues = new LinkedList<>();
			Set<String> seenColNames = new HashSet<>();
			for ( int i = 0; i < updateOps.size(); i++ ) {
				TupleOperation op = updateOps.get( i );
				insert.value( quote( op.getColumn() ), QueryBuilder.bindMarker() );
				columnValues.add( op.getValue() );
				seenColNames.add( op.getColumn() );
			}
			for ( int j = 0; j < key.getColumnNames().length; j++ ) {
				String keyCol = key.getColumnNames()[j];
				if ( !seenColNames.contains( keyCol ) ) {
					insert.value( quote( keyCol ), QueryBuilder.bindMarker() );
					columnValues.add( key.getColumnValues()[j] );
				}
			}

			bindAndExecute( columnValues.toArray(), insert );
		}
	}

	@Override
	public void removeTuple(EntityKey key, TupleContext tupleContext) {

		Delete delete = QueryBuilder.delete().from( quote( key.getTable() ) );
		Delete.Where deleteWhere = delete.where(
				eq( quote( key.getColumnNames()[0] ), QueryBuilder.bindMarker() )
		);
		for ( int i = 1; i < key.getColumnNames().length; i++ ) {
			deleteWhere = deleteWhere.and( eq( quote( key.getColumnNames()[i] ), QueryBuilder.bindMarker() ) );
		}

		bindAndExecute( key.getColumnValues(), delete );
	}

	@Override
	public Association getAssociation(AssociationKey key, AssociationContext associationContext) {
		Table tableMetadata = provider.getMetaDataCache().get( key.getTable() );
		List<Column> tablePKCols = tableMetadata.getPrimaryKey().getColumns();

		Select select = QueryBuilder.select().all().from( quote( key.getTable() ) );
		Select.Where selectWhere = select.where( eq( quote( key.getColumnNames()[0] ), QueryBuilder.bindMarker() ) );
		for ( int i = 1; i < key.getColumnNames().length; i++ ) {
			selectWhere = selectWhere.and( eq( quote( key.getColumnNames()[i] ), QueryBuilder.bindMarker() ) );
		}

		boolean requiredFiltering = false;
		for ( Column column : tablePKCols ) {
			String name = column.getName();
			boolean foundColumn = false;
			for ( int i = 0; i < key.getColumnNames().length; i++ ) {
				if ( name.equals( key.getColumnNames()[i] ) ) {
					foundColumn = true;
					break;
				}
			}
			if ( !foundColumn ) {
				requiredFiltering = true;
				break;
			}
		}

		if ( requiredFiltering ) {
			select.allowFiltering();
		}

		Object[] columnValues = key.getColumnValues();
		ResultSet resultSet = bindAndExecute( columnValues, select );

		if ( resultSet.isExhausted() ) {
			return null;
		}

		Map<RowKey, Map<String, Object>> rowKeyMap = new HashMap<>();

		List<String> combinedKeys = new LinkedList<>();
		combinedKeys.addAll( Arrays.asList( key.getColumnNames() ) );
		for ( Object column : tableMetadata.getPrimaryKey().getColumns() ) {
			String name = ( (Column) column ).getName();
			if ( ! combinedKeys.contains( name ) ) {
				combinedKeys.add( name );
			}
		}
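		// each association row must be uniquely addressable, so the RowKey spans the association
		// columns plus any remaining primary key columns of the backing table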
		String[] columnNames = combinedKeys.toArray( new String[combinedKeys.size()] );

		for ( Row row : resultSet ) {

			Map<String, Object> rowMap = tupleFromRow( row );
			Object[] resultColumnValues = new Object[columnNames.length];
			for ( int i = 0; i < columnNames.length; i++ ) {
				resultColumnValues[i] = rowMap.get( columnNames[i] );
			}
			RowKey rowKey = new RowKey( columnNames, resultColumnValues );
			rowKeyMap.put( rowKey, rowMap );
		}

		Association association = new Association( new MapAssociationSnapshot( rowKeyMap ) );

		return association;
	}

	@Override
	public Association createAssociation(AssociationKey key, AssociationContext associationContext) {
		return new Association( new MapAssociationSnapshot( new HashMap<RowKey, Map<String, Object>>() ) );
	}

	@Override
	public void insertOrUpdateAssociation(
			AssociationKey key,
			Association association,
			AssociationContext associationContext) {

		if ( key.getMetadata().isInverse() ) {
			return;
		}

		Table tableMetadata = provider.getMetaDataCache().get( key.getTable() );
		Set<String> keyColumnNames = new HashSet<>();
		for ( Object columnObject : tableMetadata.getPrimaryKey().getColumns() ) {
			Column column = (Column) columnObject;
			keyColumnNames.add( column.getName() );
		}

		List<AssociationOperation> updateOps = new ArrayList<>( association.getOperations().size() );
		List<AssociationOperation> deleteOps = new ArrayList<>( association.getOperations().size() );

		for ( AssociationOperation op : association.getOperations() ) {
			switch ( op.getType() ) {
				case CLEAR:
					break;
				case PUT:
					updateOps.add( op );
					break;
				case REMOVE:
					deleteOps.add( op );
					break;
				default:
					throw new HibernateException( "AssociationOperation not supported: " + op.getType() );
			}
		}

		for ( AssociationOperation op : updateOps ) {
			Tuple value = op.getValue();
			List<Object> columnValues = new ArrayList<>();
			Insert insert = QueryBuilder.insertInto( quote( key.getTable() ) );
			for ( String columnName : value.getColumnNames() ) {
				insert.value( quote( columnName ), QueryBuilder.bindMarker( columnName ) );
				columnValues.add( value.get( columnName ) );
			}

			bindAndExecute( columnValues.toArray(), insert );
		}

		for ( AssociationOperation op : deleteOps ) {

			RowKey value = op.getKey();
			Delete.Selection deleteSelection = QueryBuilder.delete();
			for ( String columnName : op.getKey().getColumnNames() ) {
				if ( !keyColumnNames.contains( columnName ) ) {
					deleteSelection.column( quote( columnName ) );
				}
			}
			Delete delete = deleteSelection.from( quote( key.getTable() ) );
			List<Object> columnValues = new LinkedList<>();
			for ( String columnName : value.getColumnNames() ) {
				if ( keyColumnNames.contains( columnName ) ) {
					delete.where( eq( quote( columnName ), QueryBuilder.bindMarker( columnName ) ) );
					columnValues.add( value.getColumnValue( columnName ) );
				}
			}

			bindAndExecute( columnValues.toArray(), delete );
		}

		MapHelpers.updateAssociation( association );
	}

	@Override
	public void removeAssociation(AssociationKey key, AssociationContext associationContext) {
		if ( key.getMetadata().isInverse() ) {
			return;
		}

		Table tableMetadata = provider.getMetaDataCache().get( key.getTable() );
		Set<String> keyColumnNames = new HashSet<>();
		for ( Object columnObject : tableMetadata.getPrimaryKey().getColumns() ) {
			Column column = (Column) columnObject;
			keyColumnNames.add( column.getName() );
		}

		Delete.Selection deleteSelection = QueryBuilder.delete();
		for ( String columnName : key.getColumnNames() ) {
			if ( !keyColumnNames.contains( columnName ) ) {
				deleteSelection.column( quote( columnName ) );
			}
		}

		Delete delete = deleteSelection.from( quote( key.getTable() ) );
		List<Object> columnValues = new LinkedList<>();

		boolean hasWhereClause = false;
		for ( String columnName : key.getColumnNames() ) {
			if ( keyColumnNames.contains( columnName ) ) {
				delete.where( eq( quote( columnName ), QueryBuilder.bindMarker( columnName ) ) );
				columnValues.add( key.getColumnValue( columnName ) );
				hasWhereClause = true;
			}
		}

		if ( !hasWhereClause ) {
			// a DELETE with no WHERE clause would remove every row in the table, so skip instead
			return;
		}

		bindAndExecute( columnValues.toArray(), delete );
	}

	@Override
	public GridType overrideType(Type type) {
		return CassandraTypeMapper.INSTANCE.overrideType( type );
	}

	@Override
	public void forEachTuple(ModelConsumer consumer, TupleTypeContext tupleTypeContext, EntityKeyMetadata entityKeyMetadata) {
		Select select = QueryBuilder.select().all().from( quote( entityKeyMetadata.getTable() ) );

		ResultSet resultSet;
		try {
			resultSet = session.execute( select );
		}
		catch (DriverException e) {
			throw log.failToExecuteCQL( select.getQueryString(), e );
		}

		TuplesSupplier supplier = new CassandraTuplesSupplier( resultSet );
		consumer.consume( supplier );
	}

	private static class CassandraTuplesSupplier implements TuplesSupplier {

		private final ResultSet resultSet;

		public CassandraTuplesSupplier(ResultSet resultSet) {
			this.resultSet = resultSet;
		}

		@Override
		public ClosableIterator<Tuple> get(TransactionContext transactionContext) {
			return new CassandraTupleIterator( resultSet.iterator() );
		}
	}

	private static class CassandraTupleIterator implements ClosableIterator<Tuple> {

		private final Iterator<Row> iterator;

		public CassandraTupleIterator(Iterator<Row> iterator) {
			this.iterator = iterator;
		}

		@Override
		public boolean hasNext() {
			return iterator.hasNext();
		}

		@Override
		public Tuple next() {
			return createTuple( iterator.next() );
		}

		@Override
		public void close() {
		}

		@Override
		public void remove() {
		}
	}

	private static Tuple createTuple(Row row) {
		return new Tuple( new MapTupleSnapshot( tupleFromRow( row ) ), SnapshotType.UPDATE );
	}

	public static Map<String, Object> tupleFromRow(Row row) {
		Map<String, Object> map = new HashMap<>();

		ColumnDefinitions columnDefinitions = row.getColumnDefinitions();
		int count = columnDefinitions.size();
		for ( int index = 0; index < count; index++ ) {
			String columnName = columnDefinitions.getName( index );
			Object value = row.getObject( index );
			map.put( columnName, value );
		}
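		// e.g. a row with columns (id=1, name='x') yields the map { "id" -> 1, "name" -> "x" }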
		return map;
	}

	@Override
	public boolean isStoredInEntityStructure(
			AssociationKeyMetadata associationKeyMetadata,
			AssociationTypeContext associationTypeContext) {
		return false;
	}

	@Override
	public Number nextValue(NextValueRequest request) {
		return provider.getSequenceHandler().nextValue( request );
	}

	@Override
	public ClosableIterator<Tuple> executeBackendQuery(
			BackendQuery<String> query, QueryParameters queryParameters, TupleContext tupleContext) {

		Object[] parameters = new Object[queryParameters.getPositionalParameters().size()];
		int i = 0;
		// each positional parameter is marshalled through a throwaway Tuple so that its GridType
		// applies the usual Java-to-grid value conversion before the raw value is bound
		Tuple dummy = new Tuple();

		for ( TypedGridValue parameter : queryParameters.getPositionalParameters() ) {
			parameter.getType().nullSafeSet( dummy, parameter.getValue(), new String[]{ "dummy" }, null );
			parameters[i] = dummy.get( "dummy" );
			i++;
		}

		ResultSet resultSet = bindAndExecute(
				parameters,
				new SimpleStatement( query.getQuery() )
		);

		int first = 0;
		if ( queryParameters.getRowSelection().getFirstRow() != null ) {
			first = queryParameters.getRowSelection().getFirstRow();
		}

		int max = Integer.MAX_VALUE;
		if ( queryParameters.getRowSelection().getMaxRows() != null ) {
			max = queryParameters.getRowSelection().getMaxRows();
		}

		return new ResultSetTupleIterator( resultSet, first, max );
	}

	@Override
	public int executeBackendUpdateQuery(BackendQuery<String> query, QueryParameters queryParameters, TupleContext tupleContext) {
		// TODO implement. org.hibernate.ogm.datastore.mongodb.MongoDBDialect.executeBackendUpdateQuery(BackendQuery, QueryParameters) might be helpful as a reference.
		throw new UnsupportedOperationException( "Not yet implemented." );
	}

	@Override
	public ParameterMetadataBuilder getParameterMetadataBuilder() {
		return new CassandraParameterMetadataBuilder( session, provider.getMetaDataCache() );
	}

	@Override
	public String parseNativeQuery(String nativeQuery) {
		// we defer the work, since at this point the table may not yet exist in the db and without
		// the db server supplied metadata or parsing assistance we can't do much meaningful validation.
		return nativeQuery;
	}

	@Override
	public boolean usesNavigationalInformationForInverseSideOfAssociations() {
		return false;
	}

}