
// com.effektif.mongo.MongoWorkflowInstanceStore — Maven / Gradle / Ivy (code-browser page header, commented out so the file compiles)
/*
* Copyright 2014 Effektif GmbH.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.effektif.mongo;
import com.effektif.mongo.WorkflowInstanceFields.*;
import com.effektif.workflow.api.Configuration;
import com.effektif.workflow.api.model.WorkflowId;
import com.effektif.workflow.api.model.WorkflowInstanceId;
import com.effektif.workflow.api.query.WorkflowInstanceQuery;
import com.effektif.workflow.api.types.DataType;
import com.effektif.workflow.api.workflow.Extensible;
import com.effektif.workflow.api.workflowinstance.VariableInstance;
import com.effektif.workflow.api.workflowinstance.WorkflowInstance;
import com.effektif.workflow.impl.WorkflowEngineImpl;
import com.effektif.workflow.impl.WorkflowInstanceStore;
import com.effektif.workflow.impl.configuration.Brewable;
import com.effektif.workflow.impl.configuration.Brewery;
import com.effektif.workflow.impl.data.DataTypeService;
import com.effektif.workflow.impl.job.Job;
import com.effektif.workflow.impl.util.Exceptions;
import com.effektif.workflow.impl.util.Time;
import com.effektif.workflow.impl.workflow.ActivityImpl;
import com.effektif.workflow.impl.workflow.ScopeImpl;
import com.effektif.workflow.impl.workflow.VariableImpl;
import com.effektif.workflow.impl.workflow.WorkflowImpl;
import com.effektif.workflow.impl.workflowinstance.*;
import com.mongodb.*;
import org.bson.types.ObjectId;
import org.slf4j.Logger;
import java.util.*;
import static com.effektif.mongo.ActivityInstanceFields.*;
import static com.effektif.mongo.ActivityInstanceFields.DURATION;
import static com.effektif.mongo.ActivityInstanceFields.END;
import static com.effektif.mongo.ActivityInstanceFields.END_STATE;
import static com.effektif.mongo.ActivityInstanceFields.START;
import static com.effektif.mongo.MongoDb._ID;
import static com.effektif.mongo.MongoHelper.*;
import static com.effektif.mongo.WorkflowInstanceFields.*;
/**
 * MongoDB-backed {@link WorkflowInstanceStore}: persists, queries, locks and
 * flushes workflow instances as BSON documents. All collaborators are injected
 * via {@link #brew(Brewery)}.
 */
public class MongoWorkflowInstanceStore implements WorkflowInstanceStore, Brewable {
// Shared logger for all mongo store operations.
public static final Logger log = MongoDb.log;
// Engine configuration, injected in brew().
protected Configuration configuration;
protected WorkflowEngineImpl workflowEngine;
// The collection holding one document per workflow instance.
protected MongoCollection workflowInstancesCollection;
protected MongoJobStore mongoJobsStore;
// When true, workflow ids are stored as plain strings instead of ObjectIds.
protected boolean storeWorkflowIdsAsStrings;
protected DataTypeService dataTypeService;
// Maps between BSON documents and API model objects.
protected MongoObjectMapper mongoMapper;
@Override
public void brew(Brewery brewery) {
  // Wires all collaborators from the brewery (Effektif's DI container).
  MongoConfiguration mongoConfiguration = brewery.get(MongoConfiguration.class);
  MongoDb mongoDb = brewery.get(MongoDb.class);
  // Reuse the configuration fetched above instead of a redundant second lookup.
  this.configuration = mongoConfiguration;
  this.workflowEngine = brewery.get(WorkflowEngineImpl.class);
  this.workflowInstancesCollection = mongoDb.createCollection(mongoConfiguration.workflowInstancesCollectionName);
  this.storeWorkflowIdsAsStrings = mongoConfiguration.getStoreWorkflowIdsAsString();
  this.mongoJobsStore = brewery.get(MongoJobStore.class);
  this.dataTypeService = brewery.get(DataTypeService.class);
  this.mongoMapper = brewery.get(MongoObjectMapper.class);
}
@Override
public WorkflowInstanceId generateWorkflowInstanceId() {
  // New ids are freshly generated MongoDB ObjectIds in their hex-string form.
  String internalId = new ObjectId().toString();
  return new WorkflowInstanceId(internalId);
}
@Override
public void insertWorkflowInstance(WorkflowInstanceImpl workflowInstance) {
  // Serialize to BSON and persist, then reset dirty-tracking since the stored
  // document now reflects the in-memory state.
  BasicDBObject dbDocument = writeWorkflowInstance(workflowInstance);
  workflowInstancesCollection.insert("insert-workflow-instance", dbDocument);
  workflowInstance.trackUpdates(false);
}
@Override
public void flush(WorkflowInstanceImpl workflowInstance) {
// Persists only what changed since the last flush: builds a single MongoDB
// update with $set / $unset sections driven by the instance's update-tracking
// flags, then resets the tracking.
if (log.isDebugEnabled()) log.debug("Flushing workflow instance...");
WorkflowInstanceUpdates updates = workflowInstance.getUpdates();
// The update targets the instance by id only.
DBObject query = BasicDBObjectBuilder.start()
.add(_ID, new ObjectId(workflowInstance.id.getInternal()))
// I don't recall what this line was for... if you re-add it, please add a comment to explain
// .add(LOCK, writeLock(workflowInstance.lock))
.get();
BasicDBObject sets = new BasicDBObject();
BasicDBObject unsets = new BasicDBObject();
BasicDBObject update = new BasicDBObject();
if (updates.isEndChanged) {
// Ended: store end time and duration; re-opened: remove both fields.
if (workflowInstance.end != null) {
sets.append(END, workflowInstance.end.toDate());
sets.append(DURATION, workflowInstance.duration);
}
else {
unsets.append(END, 1);
unsets.append(DURATION, 1);
}
}
if (updates.isEndStateChanged) {
// NOTE(review): a null end state is $set as null rather than $unset —
// confirm whether getEndState() can return null when this flag is set.
sets.append(END_STATE, workflowInstance.getEndState());
}
// MongoDB can't combine updates of array elements together with
// adding elements to that array. That's why we overwrite the whole
// activity instance array when an update happened in there.
// We do archive the ended (and joined) activity instances into a separate collection
// that doesn't have to be loaded.
if (updates.isActivityInstancesChanged) {
BasicDBList dbActivityInstances = writeActiveActivityInstances(workflowInstance.activityInstances);
sets.append(ACTIVITY_INSTANCES, dbActivityInstances);
}
if (updates.isVariableInstancesChanged) {
writeVariableInstances(sets, workflowInstance);
}
// Work queues: writeWork returns null for an empty queue, in which case the
// whole field is removed instead of storing an empty array.
if (updates.isWorkChanged) {
List work = writeWork(workflowInstance.work);
if (work!=null) {
sets.put(WORK, work);
} else {
unsets.put(WORK, 1);
}
}
if (updates.isAsyncWorkChanged) {
List workAsync = writeWork(workflowInstance.workAsync);
if (workAsync!=null) {
sets.put(WORK_ASYNC, workAsync);
} else {
unsets.put(WORK_ASYNC, 1);
}
}
if (updates.isNextActivityInstanceIdChanged) {
sets.put(NEXT_ACTIVITY_INSTANCE_ID, workflowInstance.nextActivityInstanceId);
}
if (updates.isNextVariableInstanceIdChanged) {
sets.put(NEXT_VARIABLE_INSTANCE_ID, workflowInstance.nextVariableInstanceId);
}
if (updates.isLockChanged) {
// a lock is only removed during a flush (acquisition happens in findAndModify)
unsets.put(LOCK, 1);
}
if (updates.isJobsChanged) {
List dbJobs = writeJobs(workflowInstance.jobs);
if (dbJobs!=null) {
sets.put(JOBS, dbJobs);
} else {
unsets.put(JOBS, 1);
}
}
if (updates.isPropertiesChanged) {
// The properties map is stored as a whole; an empty map removes the field.
if (workflowInstance.properties != null && workflowInstance.properties.size() > 0)
sets.append(PROPERTIES, new BasicDBObject(workflowInstance.getProperties()));
else
unsets.append(PROPERTIES, 1);
}
// Only hit the database when something actually changed.
if (!sets.isEmpty()) {
update.append("$set", sets);
}
if (!unsets.isEmpty()) {
update.append("$unset", unsets);
}
if (!update.isEmpty()) {
workflowInstancesCollection.update("flush-workflow-instance", query, update, false, false);
}
// reset the update tracking as all changes have been saved
workflowInstance.trackUpdates(false);
}
@Override
public void flushAndUnlock(WorkflowInstanceImpl workflowInstance) {
// Order matters: removing the lock first marks the lock as changed so that
// flush() $unsets the LOCK field in the same database round trip; unlock
// listeners are notified only after the unlock has been persisted.
workflowInstance.removeLock();
flush(workflowInstance);
workflowInstance.notifyUnlockListeners();
}
@Override
public List findWorkflowInstances(WorkflowInstanceQuery query) {
  // Translate the API-level query into a MongoDB query document and delegate.
  return findWorkflowInstances(createDbQuery(query));
}
/**
 * Runs the given MongoDB query and materializes every matching document as a
 * {@code WorkflowInstanceImpl}. Returns an empty list when nothing matches.
 */
public List findWorkflowInstances(BasicDBObject dbQuery) {
  List workflowInstances = new ArrayList<>();
  DBCursor workflowInstanceCursor = workflowInstancesCollection.find("find-workflow-instance-impls", dbQuery);
  try {
    while (workflowInstanceCursor.hasNext()) {
      BasicDBObject dbWorkflowInstance = (BasicDBObject) workflowInstanceCursor.next();
      workflowInstances.add(readWorkflowInstanceImpl(dbWorkflowInstance));
    }
  } finally {
    // DBCursor holds a server-side cursor/connection; the original leaked it.
    workflowInstanceCursor.close();
  }
  return workflowInstances;
}
@Override
public void deleteWorkflowInstances(WorkflowInstanceQuery workflowInstanceQuery) {
  // Remove every stored instance matching the translated query.
  BasicDBObject dbQuery = createDbQuery(workflowInstanceQuery);
  workflowInstancesCollection.remove("delete-workflow-instances", dbQuery);
}
@Override
public void deleteAllWorkflowInstances() {
  // An empty query document matches everything; the trailing 'false' bypasses
  // the wrapper's usual safety check for unrestricted deletes.
  BasicDBObject matchAll = new BasicDBObject();
  workflowInstancesCollection.remove("delete-workflow-instances-unchecked", matchAll, false);
}
/**
 * Translates the public {@link WorkflowInstanceQuery} into a MongoDB query
 * document. A null query matches everything.
 */
protected BasicDBObject createDbQuery(WorkflowInstanceQuery query) {
  if (query == null) {
    query = new WorkflowInstanceQuery();
  }
  BasicDBObject mongoQuery = new BasicDBObject();
  if (query.getWorkflowInstanceId() != null) {
    mongoQuery.append(_ID, new ObjectId(query.getWorkflowInstanceId().getInternal()));
  }
  if (query.getActivityId() != null) {
    // Match instances with an *open* activity instance for the given activity
    // id — open meaning its work state is still present on the element.
    BasicDBObject openActivityInstance = new BasicDBObject(ACTIVITY_ID, query.getActivityId())
        .append(WORK_STATE, new BasicDBObject("$exists", true));
    mongoQuery.append(ACTIVITY_INSTANCES, new BasicDBObject("$elemMatch", openActivityInstance));
  }
  if (query.getLockedBefore() != null) {
    mongoQuery.append(LOCK + "." + Lock.TIME, new BasicDBObject("$lt", query.getLockedBefore().toDate()));
  }
  return mongoQuery;
}
/**
 * Upserts a raw workflow-instance document.
 * Fixed typo in the operation label (was "save-workfow-instance"); if any
 * external monitoring keys off the old label, update it accordingly.
 */
public void saveWorkflowInstance(BasicDBObject dbWorkflowInstance) {
  workflowInstancesCollection.save("save-workflow-instance", dbWorkflowInstance);
}
@Override
public WorkflowInstanceImpl getWorkflowInstanceImplById(WorkflowInstanceId workflowInstanceId) {
  // Loads a single, currently *unlocked* workflow instance (the lock query
  // requires the LOCK field to be absent). Returns null when the id is null
  // or no such unlocked instance exists.
  if (workflowInstanceId == null) {
    return null;
  }
  DBObject unlockedById = createLockQuery();
  unlockedById.put(_ID, new ObjectId(workflowInstanceId.getInternal()));
  BasicDBObject dbWorkflowInstance = workflowInstancesCollection.findOne("get-workflow-instance", unlockedById);
  if (dbWorkflowInstance == null) {
    return null;
  }
  return readWorkflowInstanceImpl(dbWorkflowInstance);
}
@Override
public WorkflowInstanceImpl lockWorkflowInstance(WorkflowInstanceId workflowInstanceId) {
  // Atomically acquires the lock via findAndModify: only an instance without
  // an existing LOCK field can be claimed, so two engines can never lock the
  // same instance concurrently.
  Exceptions.checkNotNullParameter(workflowInstanceId, "workflowInstanceId");
  DBObject lockQuery = createLockQuery();
  lockQuery.put(_ID, new ObjectId(workflowInstanceId.getInternal()));
  DBObject lockUpdate = createLockUpdate();
  // Exclude the (potentially large) archived activity instances from the load.
  DBObject fieldSelection = new BasicDBObject()
      .append(ARCHIVED_ACTIVITY_INSTANCES, false);
  BasicDBObject dbWorkflowInstance =
      workflowInstancesCollection.findAndModify("lock-workflow-instance", lockQuery, lockUpdate, fieldSelection);
  if (dbWorkflowInstance == null) {
    // Not found, or already locked by another engine.
    return null;
  }
  WorkflowInstanceImpl lockedInstance = readWorkflowInstanceImpl(dbWorkflowInstance);
  lockedInstance.trackUpdates(false);
  return lockedInstance;
}
@Override
public void unlockWorkflowInstance(WorkflowInstanceImpl workflowInstance) {
// Removes this engine's lock from the stored document, then notifies the
// in-memory unlock listeners. A null instance is a silent no-op.
if (workflowInstance!=null) {
ObjectId workflowInstanceId = new ObjectId(workflowInstance.id.getInternal());
// @formatter:off
workflowInstancesCollection.update("unlock-workflow-instance",
new Query()
._id(workflowInstanceId)
.get(),
new Update()
.unset(LOCK)
.get());
// @formatter:on  (was a duplicated @formatter:off — comment-only fix)
workflowInstance.notifyUnlockListeners();
}
}
/** Base query matching only instances that nobody currently holds a lock on. */
public DBObject createLockQuery() {
  return new BasicDBObject(LOCK, new BasicDBObject("$exists", false));
}
/** $set update that stamps this engine as the lock owner at the current time. */
public DBObject createLockUpdate() {
  BasicDBObject lock = new BasicDBObject()
      .append(Lock.TIME, Time.now().toDate())
      .append(Lock.OWNER, workflowEngine.getId());
  return new BasicDBObject("$set", new BasicDBObject(LOCK, lock));
}
@Override
public WorkflowInstanceImpl lockWorkflowInstanceWithJobsDue() {
// Atomically locks one unlocked workflow instance that has at least one job
// whose due date has passed; returns null when no such instance exists.
DBObject query = createLockQuery();
// NOTE(review): this targets a top-level "done" field, while the due-date
// condition below is prefixed with JOBS + "." — suspect this was meant to be
// JOBS + "." + JobFields.DONE. Confirm against the job document schema
// before changing.
query.put(JobFields.DONE, new BasicDBObject("$exists", false));
query.put(JOBS + "." + JobFields.DUE_DATE, new BasicDBObject("$lte", Time.now().toDate()));
DBObject update = createLockUpdate();
// Exclude archived activity instances from the returned document.
DBObject retrieveFields = new BasicDBObject()
.append(ARCHIVED_ACTIVITY_INSTANCES, false);
// Sorted by START ascending (oldest instance first); the trailing booleans
// presumably mean remove=false, returnNew=true, upsert=false — verify against
// MongoCollection.findAndModify.
BasicDBObject dbWorkflowInstance = workflowInstancesCollection.findAndModify("lock-workflow-instance", query, update, retrieveFields, new BasicDBObject(START, 1), false, true, false);
if (dbWorkflowInstance==null) {
return null;
}
WorkflowInstanceImpl workflowInstance = readWorkflowInstanceImpl(dbWorkflowInstance);
workflowInstance.trackUpdates(false);
return workflowInstance;
}
/**
 * Serializes the full workflow instance to a BSON document, then adds the
 * engine-internal fields that the public API representation does not carry
 * (next ids, work queues, jobs, lock).
 */
public BasicDBObject writeWorkflowInstance(WorkflowInstanceImpl workflowInstance) {
  BasicDBObject dbDocument = mongoMapper.write(workflowInstance.toWorkflowInstance(true));
  if (storeWorkflowIdsAsStrings) {
    // Overwrite the workflow id with its string form when so configured.
    writeString(dbDocument, WORKFLOW_ID, workflowInstance.workflow.id.getInternal());
  }
  writeLongOpt(dbDocument, NEXT_ACTIVITY_INSTANCE_ID, workflowInstance.nextActivityInstanceId);
  writeLongOpt(dbDocument, NEXT_VARIABLE_INSTANCE_ID, workflowInstance.nextVariableInstanceId);
  writeObjectOpt(dbDocument, WORK, writeWork(workflowInstance.work));
  writeObjectOpt(dbDocument, WORK_ASYNC, writeWork(workflowInstance.workAsync));
  writeObjectOpt(dbDocument, JOBS, writeJobs(workflowInstance.jobs));
  writeObjectOpt(dbDocument, LOCK, writeLock(workflowInstance.lock));
  return dbDocument;
}
/**
 * Serializes a work queue as the list of its activity-instance ids.
 * Returns null (not an empty list) when there is no work, so callers can
 * $unset the field instead of storing an empty array.
 */
protected List writeWork(Queue workQueue) {
  if (workQueue == null || workQueue.isEmpty()) {
    return null;
  }
  List activityInstanceIds = new ArrayList<>();
  for (ActivityInstanceImpl activityInstance : workQueue) {
    activityInstanceIds.add(activityInstance.id);
  }
  return activityInstanceIds;
}
/** Maps a stored workflow-instance document to the public API representation. */
public WorkflowInstance readWorkflowInstance(BasicDBObject dbWorkflowInstance) {
return mongoMapper.read(dbWorkflowInstance, WorkflowInstance.class);
}
/**
 * Rebuilds the executable workflow instance from its stored document,
 * resolving the workflow definition it belongs to. Returns null for null
 * input; throws when the referenced workflow definition cannot be found.
 */
public WorkflowInstanceImpl readWorkflowInstanceImpl(BasicDBObject dbWorkflowInstance) {
if (dbWorkflowInstance==null) {
return null;
}
WorkflowInstanceImpl workflowInstance = new WorkflowInstanceImpl();
workflowInstance.id = readWorkflowInstanceId(dbWorkflowInstance, _ID);
workflowInstance.businessKey = readString(dbWorkflowInstance, BUSINESS_KEY);
Object workflowIdObject = readObject(dbWorkflowInstance, WORKFLOW_ID);
// workflowId is ObjectId in the MongoConfiguration
// workflowId is String in the MongoMemoryConfiguration
// The code is written to work dynamically (and not according to the
// configuration field storeWorkflowIdsAsStrings) because the test
// suite cleanup might encounter workflow instances created by the other engine
// NOTE(review): assumes WORKFLOW_ID is always present — a missing field would
// NPE on the toString() below; confirm the write path guarantees it.
WorkflowId workflowId = new WorkflowId(workflowIdObject.toString());
WorkflowImpl workflow = workflowEngine.getWorkflowImpl(workflowId);
if (workflow==null) {
throw new RuntimeException("No workflow for instance "+workflowInstance.id);
}
// Wire the instance to its definition; the instance is its own top-level scope.
workflowInstance.workflow = workflow;
workflowInstance.workflowInstance = workflowInstance;
workflowInstance.scope = workflow;
workflowInstance.configuration = configuration;
workflowInstance.callingWorkflowInstanceId = readWorkflowInstanceId(dbWorkflowInstance, CALLING_WORKFLOW_INSTANCE_ID);
workflowInstance.callingActivityInstanceId = readString(dbWorkflowInstance, CALLING_ACTIVITY_INSTANCE_ID);
workflowInstance.nextActivityInstanceId = readLong(dbWorkflowInstance, NEXT_ACTIVITY_INSTANCE_ID);
workflowInstance.nextVariableInstanceId = readLong(dbWorkflowInstance, NEXT_VARIABLE_INSTANCE_ID);
workflowInstance.lock = readLock((BasicDBObject) dbWorkflowInstance.get(LOCK));
workflowInstance.jobs = readJobs(readList(dbWorkflowInstance, JOBS));
// First read the activity-instance tree (collecting ids into allActivityIds),
// then resolve cross-references between activity instances and the workflow.
Map allActivityIds = new HashMap<>();
readScopeImpl(workflowInstance, dbWorkflowInstance, allActivityIds);
resolveActivityReferences(workflowInstance, workflow, allActivityIds);
// Work queues reference activity instances, so they are read after the tree.
workflowInstance.work = readWork(dbWorkflowInstance, WORK, workflowInstance);
workflowInstance.workAsync = readWork(dbWorkflowInstance, WORK_ASYNC, workflowInstance);
workflowInstance.properties = readObjectMap(dbWorkflowInstance, PROPERTIES);
workflowInstance.setProperty(ORGANIZATION_ID, readObject(dbWorkflowInstance, ORGANIZATION_ID));
// Preserve any non-standard document fields as generic properties.
copyProperties(dbWorkflowInstance, workflowInstance);
return workflowInstance;
}
/**
 * Copies database fields that have no corresponding Java field into the
 * workflow instance's generic properties. This makes it possible to store
 * non-standard fields in the document and read them back as properties.
 */
private void copyProperties(BasicDBObject dbWorkflowInstance, WorkflowInstanceImpl workflowInstance) {
  if (dbWorkflowInstance == null || workflowInstance == null) {
    return;
  }
  Set invalidPropertyKeys = Extensible.getInvalidPropertyKeys(WorkflowInstance.class);
  for (String fieldName : dbWorkflowInstance.keySet()) {
    if (invalidPropertyKeys.contains(fieldName)) {
      continue; // reserved/known field, not a free-form property
    }
    workflowInstance.setProperty(fieldName, dbWorkflowInstance.get(fieldName));
  }
}
/**
 * Reads the common scope-instance fields (timing and end state) from the
 * document, then recurses into the nested activity and variable instances.
 * allActivityIds collects activity-instance ids for later reference resolution.
 */
protected void readScopeImpl(ScopeInstanceImpl scopeInstance, BasicDBObject dbScopeInstance, Map allActivityIds) {
scopeInstance.start = readTime(dbScopeInstance, START);
scopeInstance.end = readTime(dbScopeInstance, END);
scopeInstance.endState = readString(dbScopeInstance, END_STATE);
scopeInstance.duration = readLong(dbScopeInstance, DURATION);
readActivityInstances(scopeInstance, dbScopeInstance, allActivityIds);
readVariableInstances(scopeInstance, dbScopeInstance);
}
protected void readActivityInstances(ScopeInstanceImpl scopeInstance, BasicDBObject dbScopeInstance, Map allActivityIds) {
Map
// © 2015 - 2025 Weber Informatics LLC | Privacy Policy (code-browser page footer, commented out)