/*
 * org.mongodb.morphia.DatastoreImpl
 * Artifact: morphia — Java Object Document Mapper for MongoDB.
 * (Header reconstructed from Maven-repository page residue; see the morphia
 * artifact listing for downloads, all versions, and documentation.)
 */
package org.mongodb.morphia;
import com.mongodb.BasicDBObject;
import com.mongodb.BasicDBObjectBuilder;
import com.mongodb.CommandResult;
import com.mongodb.DB;
import com.mongodb.DBCollection;
import com.mongodb.DBDecoderFactory;
import com.mongodb.DBObject;
import com.mongodb.DBRef;
import com.mongodb.DefaultDBDecoder;
import com.mongodb.MapReduceCommand;
import com.mongodb.MapReduceCommand.OutputType;
import com.mongodb.MongoClient;
import com.mongodb.ReadPreference;
import com.mongodb.WriteConcern;
import com.mongodb.WriteResult;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoDatabase;
import com.mongodb.client.model.CreateCollectionOptions;
import com.mongodb.client.model.DBCollectionUpdateOptions;
import com.mongodb.client.model.ValidationOptions;
import org.mongodb.morphia.aggregation.AggregationPipeline;
import org.mongodb.morphia.aggregation.AggregationPipelineImpl;
import org.mongodb.morphia.annotations.CappedAt;
import org.mongodb.morphia.annotations.Entity;
import org.mongodb.morphia.annotations.NotSaved;
import org.mongodb.morphia.annotations.PostPersist;
import org.mongodb.morphia.annotations.Validation;
import org.mongodb.morphia.annotations.Version;
import org.mongodb.morphia.logging.Logger;
import org.mongodb.morphia.logging.MorphiaLoggerFactory;
import org.mongodb.morphia.mapping.MappedClass;
import org.mongodb.morphia.mapping.MappedField;
import org.mongodb.morphia.mapping.Mapper;
import org.mongodb.morphia.mapping.MappingException;
import org.mongodb.morphia.mapping.cache.EntityCache;
import org.mongodb.morphia.mapping.lazy.proxy.ProxyHelper;
import org.mongodb.morphia.query.CountOptions;
import org.mongodb.morphia.query.DefaultQueryFactory;
import org.mongodb.morphia.query.Query;
import org.mongodb.morphia.query.QueryException;
import org.mongodb.morphia.query.QueryFactory;
import org.mongodb.morphia.query.UpdateException;
import org.mongodb.morphia.query.UpdateOperations;
import org.mongodb.morphia.query.UpdateOpsImpl;
import org.mongodb.morphia.query.UpdateResults;
import org.mongodb.morphia.utils.Assert;
import java.util.ArrayList;
import java.util.Collections;
import java.util.ConcurrentModificationException;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import static com.mongodb.BasicDBObject.parse;
import static com.mongodb.BasicDBObjectBuilder.start;
import static com.mongodb.DBCollection.ID_FIELD_NAME;
import static java.lang.String.format;
import static java.util.Arrays.asList;
import static java.util.Collections.singletonList;
/**
* A generic (type-safe) wrapper around mongodb collections
*
* @deprecated This is an internal implementation of a published API. No public alternative planned.
*/
@Deprecated
@SuppressWarnings("deprecation")
public class DatastoreImpl implements AdvancedDatastore {
// NOTE(review): generic type parameters (<T>, <T, V>) appear to have been stripped from this
// file during extraction — TODO confirm signatures against the upstream Morphia 1.x source.
private static final Logger LOG = MorphiaLoggerFactory.get(DatastoreImpl.class);
private final Morphia morphia;
private final MongoClient mongoClient;
// Primary database handle (newer driver API).
private final MongoDatabase database;
private final IndexHelper indexHelper;
// Legacy database handle (deprecated driver API); derived from 'database' in the constructor.
private DB db;
private Mapper mapper;
// Default WriteConcern, copied from the MongoClient at construction time.
private WriteConcern defConcern;
private DBDecoderFactory decoderFactory;
// Factory used to build Query instances; volatile so it can be swapped at runtime.
private volatile QueryFactory queryFactory = new DefaultQueryFactory();
/**
 * Create a new DatastoreImpl
 *
 * @param morphia the Morphia instance
 * @param mongoClient the connection to the MongoDB instance
 * @param dbName the name of the database for this data store.
 * @deprecated This is not meant to be directly instantiated by end user code. Use
 * {@link Morphia#createDatastore(MongoClient, Mapper, String)}
 */
@Deprecated
public DatastoreImpl(final Morphia morphia, final MongoClient mongoClient, final String dbName) {
    // Delegates to the Mapper-accepting constructor, using the Mapper configured on Morphia.
    this(morphia, morphia.getMapper(), mongoClient, dbName);
}
/**
 * Create a new DatastoreImpl
 *
 * @param morphia the Morphia instance
 * @param mapper an initialised Mapper
 * @param mongoClient the connection to the MongoDB instance
 * @param dbName the name of the database for this data store.
 * @deprecated This is not meant to be directly instantiated by end user code. Use
 * {@link Morphia#createDatastore(MongoClient, Mapper, String)}
 */
@Deprecated
public DatastoreImpl(final Morphia morphia, final Mapper mapper, final MongoClient mongoClient, final String dbName) {
    // Resolves the database by name, then delegates to the canonical private constructor.
    this(morphia, mapper, mongoClient, mongoClient.getDatabase(dbName));
}
/**
 * Canonical constructor; all public constructors delegate here.
 *
 * @param morphia the Morphia instance
 * @param mapper an initialised Mapper
 * @param mongoClient the connection to the MongoDB instance
 * @param database the database this datastore operates on
 */
private DatastoreImpl(final Morphia morphia, final Mapper mapper, final MongoClient mongoClient, final MongoDatabase database) {
    this.morphia = morphia;
    this.mapper = mapper;
    this.mongoClient = mongoClient;
    this.database = database;
    // Keep a legacy DB handle alongside the MongoDatabase for APIs that still need it.
    this.db = mongoClient.getDB(database.getName());
    // Client-level write concern becomes the default unless overridden per-entity.
    this.defConcern = mongoClient.getWriteConcern();
    this.indexHelper = new IndexHelper(mapper, database);
}
/**
 * Creates a copy of this Datastore and all its configuration but with a new database
 *
 * @param database the new database to use for operations
 * @return the new Datastore instance
 * @deprecated use {@link Morphia#createDatastore(MongoClient, Mapper, String)}
 */
@Deprecated
public DatastoreImpl copy(final String database) {
    // Shares morphia, mapper, and client; only the target database name changes.
    return new DatastoreImpl(morphia, mapper, mongoClient, database);
}
/**
 * @param source the initial type/collection to aggregate against
 * @return a new query bound to the kind (a specific {@link DBCollection})
 */
@Override
public AggregationPipeline createAggregation(final Class source) {
    // The pipeline runs against the collection mapped for the source type.
    return new AggregationPipelineImpl(this, getCollection(source), source);
}
/**
 * Creates an aggregation pipeline against an explicitly named collection.
 *
 * @param collection the name of the collection to aggregate against
 * @param clazz the type results are mapped to
 * @return the new pipeline
 */
@Override
public AggregationPipeline createAggregation(final String collection, final Class clazz) {
    return new AggregationPipelineImpl(this, getDB().getCollection(collection), clazz);
}
/**
 * Creates a new query bound to the collection mapped for the given type.
 *
 * @param collection the type to query
 * @return the new query
 */
@Override
public Query createQuery(final Class collection) {
    final DBCollection dbCollection = getCollection(collection);
    return newQuery(collection, dbCollection);
}
/**
 * Creates a new, empty set of update operations for the given type.
 *
 * @param clazz the type the operations will apply to
 * @return the new update operations
 */
@Override
public UpdateOperations createUpdateOperations(final Class clazz) {
    final Mapper currentMapper = getMapper();
    return new UpdateOpsImpl(clazz, currentMapper);
}
/**
 * Deletes every document matched by the query.
 *
 * @param query the query selecting documents to remove
 * @param options the delete options (write concern etc.)
 * @return the result of the remove
 * @throws QueryException if the query carries sort/offset/limit options, which have
 *         no meaning for a remove
 */
@Override
public WriteResult delete(final Query query, final DeleteOptions options) {
    DBCollection collection = query.getCollection();
    // TODO remove this after testing.
    if (collection == null) {
        // Fall back to the collection mapped for the entity type.
        collection = getCollection(query.getEntityClass());
    }
    final boolean hasDisallowedOptions =
        query.getSortObject() != null || query.getOffset() != 0 || query.getLimit() > 0;
    if (hasDisallowedOptions) {
        throw new QueryException("Delete does not allow sort/offset/limit query options.");
    }
    return collection.remove(query.getQueryObject(), enforceWriteConcern(options, query.getEntityClass()).getOptions());
}
/**
 * Deletes the document with the given id, using the type's default write concern.
 *
 * @param clazz the mapped type
 * @param id the id of the document to remove
 * @return the result of the remove
 */
@Override
public WriteResult delete(final Class clazz, final V id) {
    final DeleteOptions options = new DeleteOptions().writeConcern(getWriteConcern(clazz));
    return delete(clazz, id, options);
}
/**
 * Deletes the document with the given id using the supplied options.
 *
 * @param clazz the mapped type
 * @param id the id of the document to remove
 * @param options the delete options
 * @return the result of the remove
 */
@Override
public WriteResult delete(final Class clazz, final V id, final DeleteOptions options) {
    // Delete by primary key.
    final Query byId = createQuery(clazz).filter(Mapper.ID_KEY, id);
    return delete(byId, options);
}
/**
 * Deletes every document whose id is in the given set.
 *
 * @param clazz the mapped type
 * @param ids the ids of the documents to remove
 * @return the result of the remove
 */
@Override
public WriteResult delete(final Class clazz, final Iterable ids) {
    // A single "in" filter removes all listed ids in one round trip.
    final Query byIds = find(clazz).filter(Mapper.ID_KEY + " in", ids);
    return delete(byIds);
}
/**
 * Deletes every document whose id is in the given set, using the supplied options.
 *
 * @param clazz the mapped type
 * @param ids the ids of the documents to remove
 * @param options the delete options
 * @return the result of the remove
 */
@Override
public WriteResult delete(final Class clazz, final Iterable ids, final DeleteOptions options) {
    final Query byIds = find(clazz).filter(Mapper.ID_KEY + " in", ids);
    return delete(byIds, options);
}
/**
 * Deletes every document matched by the query, using the entity type's default
 * write concern.
 *
 * @param query the query selecting documents to remove
 * @return the result of the remove
 */
@Override
public WriteResult delete(final Query query) {
    final WriteConcern concern = getWriteConcern(query.getEntityClass());
    return delete(query, new DeleteOptions().writeConcern(concern));
}
/**
 * Deletes every document matched by the query with an explicit write concern.
 *
 * @param query the query selecting documents to remove
 * @param wc the write concern to apply
 * @return the result of the remove
 * @deprecated use the {@code DeleteOptions} overload instead
 */
@Override
@Deprecated
public WriteResult delete(final Query query, final WriteConcern wc) {
    final DeleteOptions options = new DeleteOptions().writeConcern(wc);
    return delete(query, options);
}
/**
 * Deletes the given entity (by id), using its class-level write concern.
 *
 * @param entity the entity to delete
 * @return the result of the remove
 */
@Override
public WriteResult delete(final T entity) {
    final WriteConcern concern = getWriteConcern(entity);
    return delete(entity, concern);
}
/**
 * Deletes the given entity (by @Id), with the WriteConcern
 *
 * @param entity the entity to delete
 * @param options the options to use when deleting
 * @return results of the delete
 */
@Override
public WriteResult delete(final T entity, final DeleteOptions options) {
    final T wrapped = ProxyHelper.unwrap(entity);
    // A Class argument almost certainly means the caller meant the query-based overload.
    if (wrapped instanceof Class) {
        throw new MappingException("Did you mean to delete all documents? -- delete(ds.createQuery(???.class))");
    }
    try {
        // Delete by the entity's mapped id.
        return delete(wrapped.getClass(), mapper.getId(wrapped), options);
    } catch (Exception e) {
        // NOTE(review): broad catch-and-wrap hides the original exception type from callers;
        // they only ever see RuntimeException. Consider narrowing.
        throw new RuntimeException(e);
    }
}
/**
 * Deletes the given entity (by id) with an explicit write concern.
 *
 * @param entity the entity to delete
 * @param wc the write concern to apply
 * @return the result of the remove
 * @deprecated use the {@code DeleteOptions} overload instead
 */
@Override
@Deprecated
public WriteResult delete(final T entity, final WriteConcern wc) {
    final DeleteOptions options = new DeleteOptions().writeConcern(wc);
    return delete(entity, options);
}
/**
 * Creates capped collections for every mapped class annotated with
 * {@code @Entity(cap = @CappedAt(...))}. Existing collections are never converted.
 */
@Override
public void ensureCaps() {
    for (final MappedClass mc : mapper.getMappedClasses()) {
        if (mc.getEntityAnnotation() != null && mc.getEntityAnnotation().cap().value() > 0) {
            final CappedAt cap = mc.getEntityAnnotation().cap();
            final String collName = mapper.getCollectionName(mc.getClazz());
            final BasicDBObjectBuilder dbCapOpts = start("capped", true);
            if (cap.value() > 0) {
                // "size" caps the collection's size in bytes.
                dbCapOpts.add("size", cap.value());
            }
            if (cap.count() > 0) {
                // "max" caps the number of documents.
                dbCapOpts.add("max", cap.count());
            }
            final DB database = getDB();
            if (database.getCollectionNames().contains(collName)) {
                // Collection exists: check via collstats whether it is already capped.
                final DBObject dbResult = database.command(start("collstats", collName).get());
                if (dbResult.containsField("capped")) {
                    LOG.debug("DBCollection already exists and is capped already; doing nothing. " + dbResult);
                } else {
                    // Deliberately refuses to convert an existing, un-capped collection.
                    LOG.warning("DBCollection already exists with same name(" + collName
                        + ") and is not capped; not creating capped version!");
                }
            } else {
                getDB().createCollection(collName, dbCapOpts.get());
                LOG.debug("Created capped DBCollection (" + collName + ") with opts " + dbCapOpts);
            }
        }
    }
}
/**
 * Applies the {@link Validation} annotation, where present, to every mapped class's
 * collection.
 */
@Override
public void enableDocumentValidation() {
    for (final MappedClass mappedClass : mapper.getMappedClasses()) {
        final Validation validation = (Validation) mappedClass.getAnnotation(Validation.class);
        process(mappedClass, validation);
    }
}
/**
 * Applies the given {@link Validation} settings to the mapped class's collection,
 * creating the collection with those options if it does not exist yet.
 *
 * @param mc the mapped class whose collection is updated
 * @param validation the validation settings; no-op when null
 */
void process(final MappedClass mc, final Validation validation) {
    if (validation != null) {
        String collectionName = mc.getCollectionName();
        // First try collMod, which updates validation options on an existing collection.
        CommandResult result = getDB()
            .command(new BasicDBObject("collMod", collectionName)
                .append("validator", parse(validation.value()))
                .append("validationLevel", validation.level().getValue())
                .append("validationAction", validation.action().getValue())
            );
        if (!result.ok()) {
            if (result.getInt("code") == 26) {
                // Error code 26 (NamespaceNotFound): the collection does not exist yet,
                // so create it with the validation options attached.
                ValidationOptions options = new ValidationOptions()
                    .validator(parse(validation.value()))
                    .validationLevel(validation.level())
                    .validationAction(validation.action());
                getDatabase().createCollection(collectionName, new CreateCollectionOptions().validationOptions(options));
            } else {
                // Any other failure is surfaced to the caller.
                result.throwOnError();
            }
        }
    }
}
/**
 * Checks whether the given entity (or key) has a matching document in the datastore.
 *
 * @param entityOrKey the entity or Key to look up
 * @return the matching Key — presumably null when no document exists; confirm
 *         against buildExistsQuery/getKey semantics
 */
@Override
public Key exists(final Object entityOrKey) {
    return buildExistsQuery(entityOrKey).getKey();
}
/**
 * Returns a new query for the given type; find() is simply an alias for createQuery().
 *
 * @param clazz the type to query
 * @return the new query
 */
@Override
public Query find(final Class clazz) {
    return createQuery(clazz);
}
/**
 * Creates a query filtered on a single property.
 *
 * @param clazz the type to query
 * @param property the property to filter on
 * @param value the value to match
 * @return the filtered query
 * @deprecated build the query via {@code createQuery(clazz).filter(...)} instead
 */
@Override
@Deprecated
public Query find(final Class clazz, final String property, final V value) {
    return createQuery(clazz).filter(property, value);
}
/**
 * Creates a query filtered on a single property, with paging applied.
 *
 * @param clazz the type to query
 * @param property the property to filter on
 * @param value the value to match
 * @param offset the number of results to skip
 * @param size the maximum number of results
 * @return the filtered, paged query
 * @deprecated build the query via {@code createQuery(clazz)} instead
 */
@Override
@Deprecated
public Query find(final Class clazz, final String property, final V value, final int offset, final int size) {
    final Query paged = createQuery(clazz);
    paged.limit(size);
    paged.offset(offset);
    return paged.filter(property, value);
}
/**
 * Atomically finds and removes the first document matched by the query, using
 * default options.
 *
 * @param query the query selecting the document
 * @return the deleted entity, or null if nothing matched
 */
@Override
public T findAndDelete(final Query query) {
    final FindAndModifyOptions defaults = new FindAndModifyOptions();
    return findAndDelete(query, defaults);
}
/**
 * Atomically finds and removes the first document matched by the query, returning
 * the deleted entity mapped back to its type.
 *
 * @param query the query selecting the document
 * @param options the findAndModify options to start from
 * @return the deleted entity, or null if nothing matched
 */
@Override
public T findAndDelete(final Query query, final FindAndModifyOptions options) {
    DBCollection dbColl = query.getCollection();
    if (dbColl == null) {
        dbColl = getCollection(query.getEntityClass());
    }
    if (LOG.isTraceEnabled()) {
        LOG.trace("Executing findAndModify(" + dbColl.getName() + ") with delete ...");
    }
    // remove(true) turns findAndModify into a find-and-delete; returnNew/upsert are
    // forced off because they are meaningless for a removal.
    FindAndModifyOptions copy = enforceWriteConcern(options, query.getEntityClass())
        .copy()
        .projection(query.getFieldsObject())
        .sort(query.getSortObject())
        .returnNew(false)
        .upsert(false)
        .remove(true);
    final DBObject result = dbColl.findAndModify(query.getQueryObject(), copy.getOptions());
    return result == null ? null : mapper.fromDBObject(this, query.getEntityClass(), result, createCache());
}
/**
 * Atomically applies the update operations to the first document matched by the
 * query and returns the resulting entity (new or old state, per {@code options}).
 *
 * @param query the query selecting the document to modify
 * @param operations the update operations to apply
 * @param options the findAndModify options
 * @return the mapped entity, or null if nothing matched
 */
@Override
public T findAndModify(final Query query, final UpdateOperations operations, final FindAndModifyOptions options) {
    DBCollection dbColl = query.getCollection();
    // TODO remove this after testing.
    if (dbColl == null) {
        dbColl = getCollection(query.getEntityClass());
    }
    if (LOG.isTraceEnabled()) {
        // Fixed: previously called LOG.info() here, bypassing the trace-level guard above
        // (and inconsistent with the findAndDelete path, which logs at trace).
        LOG.trace("Executing findAndModify(" + dbColl.getName() + ") with update ");
    }
    // Increment the @Version field (if the type has one) as part of this update.
    updateForVersioning(query, operations);
    DBObject res = dbColl.findAndModify(query.getQueryObject(), options.copy()
        .sort(query.getSortObject())
        .projection(query.getFieldsObject())
        .update(((UpdateOpsImpl) operations).getOps())
        .getOptions());
    return res == null ? null : mapper.fromDBObject(this, query.getEntityClass(), res, createCache());
}
/**
 * Atomically applies the update operations to the first matched document, returning
 * the post-update state.
 *
 * @param query the query selecting the document to modify
 * @param operations the update operations to apply
 * @return the updated entity, or null if nothing matched
 */
@Override
public T findAndModify(final Query query, final UpdateOperations operations) {
    final FindAndModifyOptions returnUpdated = new FindAndModifyOptions().returnNew(true);
    return findAndModify(query, operations, returnUpdated);
}
/**
 * Atomically applies the update operations to the first matched document.
 *
 * @param query the query selecting the document to modify
 * @param operations the update operations to apply
 * @param oldVersion when true, return the document's pre-update state
 * @return the mapped entity, or null if nothing matched
 * @deprecated use the {@code FindAndModifyOptions} overload instead
 */
@Override
@Deprecated
public T findAndModify(final Query query, final UpdateOperations operations, final boolean oldVersion) {
    final FindAndModifyOptions options = new FindAndModifyOptions()
        .returnNew(!oldVersion)
        .upsert(false);
    return findAndModify(query, operations, options);
}
/**
 * Atomically applies the update operations to the first matched document, optionally
 * inserting when nothing matches.
 *
 * @param query the query selecting the document to modify
 * @param operations the update operations to apply
 * @param oldVersion when true, return the document's pre-update state
 * @param createIfMissing when true, upsert if no document matches
 * @return the mapped entity, or null if nothing matched and no upsert occurred
 * @deprecated use the {@code FindAndModifyOptions} overload instead
 */
@Override
@Deprecated
public T findAndModify(final Query query, final UpdateOperations operations, final boolean oldVersion,
    final boolean createIfMissing) {
    final FindAndModifyOptions options = new FindAndModifyOptions()
        .returnNew(!oldVersion)
        .upsert(createIfMissing);
    return findAndModify(query, operations, options);
}
/**
 * Adds an increment of the mapped @Version field to the operations when the queried
 * type is versioned; otherwise does nothing.
 *
 * @param query the query whose entity type is inspected
 * @param operations the update operations to augment
 */
private void updateForVersioning(final Query query, final UpdateOperations operations) {
    final MappedClass mappedClass = mapper.getMappedClass(query.getEntityClass());
    final boolean versioned = !mappedClass.getFieldsAnnotatedWith(Version.class).isEmpty();
    if (versioned) {
        operations.inc(mappedClass.getMappedVersionField().getNameToStore());
    }
}
/**
 * Creates a query matching every document whose id is in the given set.
 * Validation is disabled around the raw "in" filter, then re-enabled.
 *
 * @param clazz the type to query
 * @param ids the ids to match
 * @return the query
 */
@Override
public Query get(final Class clazz, final Iterable ids) {
    final Query query = find(clazz).disableValidation();
    return query.filter(Mapper.ID_KEY + " in", ids).enableValidation();
}
/**
 * Fetches the single entity with the given id, or null if none exists.
 *
 * @param clazz the type to fetch
 * @param id the id to match
 * @return the entity, or null
 */
@Override
public T get(final Class clazz, final V id) {
    // offset=0, size=1: at most one document. The trailing 'true' presumably toggles
    // validation behavior on the find overload — TODO confirm against that overload.
    return find(getCollection(clazz).getName(), clazz, Mapper.ID_KEY, id, 0, 1, true).get();
}
/**
 * Re-fetches the given entity from the datastore by its mapped id.
 *
 * @param entity the (possibly proxied) entity whose current state to load
 * @return the freshly loaded entity, or null if no document matches
 * @throws MappingException if the entity has no id set
 */
@Override
@SuppressWarnings("unchecked")
public T get(final T entity) {
    // Unwrap lazy-loading proxies before reading the id.
    final T unwrapped = ProxyHelper.unwrap(entity);
    final Object id = mapper.getId(unwrapped);
    if (id == null) {
        throw new MappingException("Could not get id for " + unwrapped.getClass().getName());
    }
    return (T) get(unwrapped.getClass(), id);
}
/**
 * Fetches the entity referenced by the given Key.
 *
 * @param clazz the expected type
 * @param key the key identifying the entity
 * @return the entity, or null if no document matches
 * @throws RuntimeException if the key's collection does not match the type's collection
 */
@Override
public T getByKey(final Class clazz, final Key key) {
    final String collectionName = mapper.getCollectionName(clazz);
    final String keyCollection = mapper.updateCollection(key);
    // Guard against a key that belongs to a different collection than the requested type.
    if (!collectionName.equals(keyCollection)) {
        throw new RuntimeException("collection names don't match for key and class: " + collectionName + " != " + keyCollection);
    }
    Object id = key.getId();
    if (id instanceof DBObject) {
        // Strip the discriminator field so it does not take part in the _id match.
        ((DBObject) id).removeField(Mapper.CLASS_NAME_FIELDNAME);
    }
    return get(clazz, id);
}
@Override
@SuppressWarnings({"rawtypes", "unchecked"})
public List getByKeys(final Class clazz, final Iterable> keys) {
final Map> kindMap = new HashMap>();
final List entities = new ArrayList();
// String clazzKind = (clazz==null) ? null :
// getMapper().getCollectionName(clazz);
for (final Key key : keys) {
mapper.updateCollection(key);
// if (clazzKind != null && !key.getKind().equals(clazzKind))
// throw new IllegalArgumentException("Types are not equal (" +
// clazz + "!=" + key.getKindClass() +
// ") for key and method parameter clazz");
//
if (kindMap.containsKey(key.getCollection())) {
kindMap.get(key.getCollection()).add(key);
} else {
kindMap.put(key.getCollection(), new ArrayList(singletonList((Key) key)));
}
}
for (final Map.Entry> entry : kindMap.entrySet()) {
final List kindKeys = entry.getValue();
final List