indi.atlantis.framework.chaconne.JdbcJobManager (chaconne-spring-boot-starter)
Distributed task scheduling application framework
/**
* Copyright 2017-2021 Fred Feng ([email protected])
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package indi.atlantis.framework.chaconne;
import static com.github.paganini2008.devtools.beans.BeanUtils.convertAsBean;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import org.springframework.beans.factory.annotation.Autowired;
import com.github.paganini2008.devtools.ArrayUtils;
import com.github.paganini2008.devtools.StringUtils;
import com.github.paganini2008.devtools.beans.PropertyUtils;
import com.github.paganini2008.devtools.collection.CollectionUtils;
import com.github.paganini2008.devtools.date.DateUtils;
import com.github.paganini2008.devtools.jdbc.PageRequest;
import com.github.paganini2008.devtools.jdbc.PageResponse;
import com.github.paganini2008.devtools.jdbc.ResultSetSlice;
import indi.atlantis.framework.chaconne.model.JobDetail;
import indi.atlantis.framework.chaconne.model.JobKeyQuery;
import indi.atlantis.framework.chaconne.model.JobLog;
import indi.atlantis.framework.chaconne.model.JobRuntimeDetail;
import indi.atlantis.framework.chaconne.model.JobStackTrace;
import indi.atlantis.framework.chaconne.model.JobStat;
import indi.atlantis.framework.chaconne.model.JobStatDetail;
import indi.atlantis.framework.chaconne.model.JobStatPageQuery;
import indi.atlantis.framework.chaconne.model.JobStatQuery;
import indi.atlantis.framework.chaconne.model.JobStateCount;
import indi.atlantis.framework.chaconne.model.JobTrace;
import indi.atlantis.framework.chaconne.model.JobTracePageQuery;
import indi.atlantis.framework.chaconne.model.JobTraceQuery;
import indi.atlantis.framework.chaconne.model.JobTriggerDetail;
import indi.atlantis.framework.chaconne.model.PageQuery;
import indi.atlantis.framework.chaconne.model.Query;
import indi.atlantis.framework.chaconne.model.TriggerDescription;
import indi.atlantis.framework.chaconne.model.TriggerDescription.Dependency;
import lombok.extern.slf4j.Slf4j;
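/*
 * Illustrative usage sketch (not from the original source; the wiring shown is
 * an assumption): JdbcJobManager uses field injection, so it is expected to be
 * registered as a Spring bean behind the JobManager interface, and a caller
 * would typically drive the job lifecycle like this:
 *
 *   @Autowired
 *   private JobManager jobManager;
 *
 *   void pauseThenResume(JobKey jobKey) throws Exception {
 *       jobManager.pauseJob(jobKey);   // SCHEDULING -> PAUSED
 *       jobManager.resumeJob(jobKey);  // PAUSED -> SCHEDULING
 *   }
 */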
/**
*
* JdbcJobManager
*
* @author Fred Feng
*
* @since 2.0.1
*/
@Slf4j
public class JdbcJobManager implements JobManager {
@Autowired
private LifeCycleListenerContainer lifeCycleListenerContainer;
@Autowired
private JobIdCache jobIdCache;
@Autowired
private JobDao jobDao;
@Autowired
private JobQueryDao jobQueryDao;
@Override
public String[] selectClusterNames() throws Exception {
List<String> clusterNames = jobQueryDao.selectClusterNames();
return clusterNames.toArray(new String[0]);
}
@Override
public JobState pauseJob(JobKey jobKey) throws Exception {
if (hasJob(jobKey) && hasJobState(jobKey, JobState.SCHEDULING)) {
if (log.isTraceEnabled()) {
log.trace("Pause job: " + jobKey);
}
return setJobState(jobKey, JobState.PAUSED);
}
return getJobRuntimeDetail(jobKey).getJobState();
}
@Override
public JobState resumeJob(JobKey jobKey) throws Exception {
if (hasJob(jobKey) && hasJobState(jobKey, JobState.PAUSED)) {
if (log.isTraceEnabled()) {
log.trace("Resume job: " + jobKey);
}
return setJobState(jobKey, JobState.SCHEDULING);
}
return getJobRuntimeDetail(jobKey).getJobState();
}
@Override
public int getJobId(final JobKey jobKey) throws Exception {
return jobIdCache.getJobId(jobKey, () -> {
return doGetJobId(jobKey);
});
}
private int doGetJobId(JobKey jobKey) {
if (hasJob(jobKey)) {
Integer jobId = jobQueryDao.selectJobId(jobKey.getClusterName(), jobKey.getGroupName(), jobKey.getJobName(),
jobKey.getJobClassName());
if (jobId != null) {
return jobId.intValue();
}
}
throw new JobBeanNotFoundException(jobKey);
}
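/*
 * The two-step lookup above is a cache-aside pattern: getJobId(jobKey, supplier)
 * consults JobIdCache first and only falls back to doGetJobId() on a miss.
 * Assuming a map-backed cache (an assumption; JobIdCache's source is not shown
 * here), it behaves roughly like:
 *
 *   return cache.computeIfAbsent(jobKey, k -> supplier.get());
 *
 * finishJob() below calls jobIdCache.evict(jobKey) to invalidate the entry.
 */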
@Override
public int persistJob(JobDefinition jobDef, String attachment) throws Exception {
final JobKey jobKey = JobKey.of(jobDef);
int jobId;
Trigger trigger;
TriggerType triggerType;
DependencyType dependencyType = null;
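// Derive the dependency type from the definition: dependent keys alone map to
// SERIAL, fork keys alone map to PARALLEL, and both together map to MIXED.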
boolean hasDependentKeys = ArrayUtils.isNotEmpty(jobDef.getDependentKeys());
boolean hasSubKeys = ArrayUtils.isNotEmpty(jobDef.getForkKeys());
if (hasDependentKeys && hasSubKeys) {
dependencyType = DependencyType.MIXED;
} else if (hasDependentKeys && !hasSubKeys) {
dependencyType = DependencyType.SERIAL;
} else if (!hasDependentKeys && hasSubKeys) {
dependencyType = DependencyType.PARALLEL;
}
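// Existing job: merge the new definition into the stored detail and trigger rows.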
if (hasJob(jobKey)) {
jobId = getJobId(jobKey);
Map<String, Object> kwargs = new HashMap<>();
kwargs.put("description", jobDef.getDescription());
kwargs.put("attachment", attachment);
kwargs.put("email", jobDef.getEmail());
kwargs.put("retries", jobDef.getRetries());
kwargs.put("weight", jobDef.getWeight());
kwargs.put("timeout", jobDef.getTimeout());
kwargs.put("clusterName", jobDef.getClusterName());
kwargs.put("groupName", jobDef.getGroupName());
kwargs.put("jobName", jobDef.getJobName());
kwargs.put("jobClassName", jobDef.getJobClassName());
jobDao.updateJobDetail(kwargs);
trigger = jobDef.getTrigger();
triggerType = trigger.getTriggerType();
if (dependencyType != null) {
TriggerDescription triggerDescription = trigger.getTriggerDescription();
triggerDescription.setDependency(new Dependency());
Dependency dependency = triggerDescription.getDependency();
dependency.setDependencyType(dependencyType);
switch (dependencyType) {
case SERIAL:
dependency.setDependentKeys(jobDef.getDependentKeys());
dependency.setTriggerType(triggerType);
triggerDescription.setCron(null);
triggerDescription.setPeriodic(null);
break;
case PARALLEL:
dependency.setForkKeys(jobDef.getForkKeys());
dependency.setCompletionRate(jobDef.getCompletionRate());
dependency.setTriggerType(triggerType);
if (triggerType == TriggerType.CRON) {
triggerDescription.getDependency().setCron(triggerDescription.getCron());
triggerDescription.setCron(null);
} else if (triggerType == TriggerType.PERIODIC) {
triggerDescription.getDependency().setPeriodic(triggerDescription.getPeriodic());
triggerDescription.setPeriodic(null);
}
break;
case MIXED:
dependency.setDependentKeys(jobDef.getDependentKeys());
dependency.setForkKeys(jobDef.getForkKeys());
dependency.setTriggerType(triggerType);
triggerDescription.setCron(null);
triggerDescription.setPeriodic(null);
break;
}
}
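// A job with dependencies is persisted under the synthetic DEPENDENT trigger
// type; its original trigger type (and cron/periodic settings, for PARALLEL)
// is carried inside the Dependency section of the trigger description.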
kwargs.put("triggerType", hasDependentKeys || hasSubKeys ? TriggerType.DEPENDENT.getValue() : triggerType.getValue());
kwargs.put("triggerDescription", JacksonUtils.toJsonString(trigger.getTriggerDescription()));
kwargs.put("startDate", trigger.getStartDate() != null ? new Timestamp(trigger.getStartDate().getTime()) : null);
kwargs.put("endDate", trigger.getEndDate() != null ? new Timestamp(trigger.getEndDate().getTime()) : null);
kwargs.put("repeatCount", trigger.getRepeatCount());
kwargs.put("jobId", jobId);
jobDao.updateJobTrigger(kwargs);
log.info("Merge job '{}' ok.", jobKey);
JobRuntimeDetail runtimeDetail = getJobRuntimeDetail(jobKey);
if (runtimeDetail != null && runtimeDetail.getJobState() == JobState.RUNNING) {
setJobState(jobKey, JobState.SCHEDULING);
}
if (dependencyType != null) {
switch (dependencyType) {
case SERIAL:
handleJobDependency(jobKey, jobId, jobDef.getDependentKeys(), DependencyType.SERIAL);
break;
case PARALLEL:
handleJobDependency(jobKey, jobId, jobDef.getForkKeys(), DependencyType.PARALLEL);
break;
case MIXED:
handleJobDependency(jobKey, jobId, jobDef.getDependentKeys(), DependencyType.SERIAL);
handleJobDependency(jobKey, jobId, jobDef.getForkKeys(), DependencyType.PARALLEL);
break;
}
}
lifeCycleListenerContainer.onChange(jobKey, JobLifeCycle.REFRESH);
return jobId;
} else {
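// New job: insert fresh job detail, runtime-state and trigger rows.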
Map<String, Object> kwargs = new HashMap<>();
kwargs.put("clusterName", jobDef.getClusterName());
kwargs.put("groupName", jobDef.getGroupName());
kwargs.put("jobName", jobDef.getJobName());
kwargs.put("jobClassName", jobDef.getJobClassName());
kwargs.put("description", jobDef.getDescription());
kwargs.put("attachment", attachment);
kwargs.put("email", jobDef.getEmail());
kwargs.put("retries", jobDef.getRetries());
kwargs.put("weight", jobDef.getWeight());
kwargs.put("timeout", jobDef.getTimeout());
kwargs.put("createDate", new Timestamp(System.currentTimeMillis()));
jobId = jobDao.saveJobDetail(kwargs);
kwargs = new HashMap<>();
kwargs.put("jobId", jobId);
kwargs.put("jobState", JobState.NOT_SCHEDULED.getValue());
jobDao.saveJobRuntimeDetail(kwargs);
trigger = jobDef.getTrigger();
triggerType = trigger.getTriggerType();
if (dependencyType != null) {
TriggerDescription triggerDescription = trigger.getTriggerDescription();
triggerDescription.setDependency(new Dependency());
Dependency dependency = triggerDescription.getDependency();
dependency.setDependencyType(dependencyType);
switch (dependencyType) {
case SERIAL:
dependency.setDependentKeys(jobDef.getDependentKeys());
dependency.setTriggerType(triggerType);
triggerDescription.setCron(null);
triggerDescription.setPeriodic(null);
break;
case PARALLEL:
dependency.setForkKeys(jobDef.getForkKeys());
dependency.setCompletionRate(jobDef.getCompletionRate());
dependency.setTriggerType(triggerType);
if (triggerType == TriggerType.CRON) {
triggerDescription.getDependency().setCron(triggerDescription.getCron());
triggerDescription.setCron(null);
} else if (triggerType == TriggerType.PERIODIC) {
triggerDescription.getDependency().setPeriodic(triggerDescription.getPeriodic());
triggerDescription.setPeriodic(null);
}
break;
case MIXED:
dependency.setDependentKeys(jobDef.getDependentKeys());
dependency.setForkKeys(jobDef.getForkKeys());
dependency.setTriggerType(triggerType);
triggerDescription.setCron(null);
triggerDescription.setPeriodic(null);
break;
}
}
kwargs = new HashMap<>();
kwargs.put("jobId", jobId);
kwargs.put("triggerType", hasDependentKeys || hasSubKeys ? TriggerType.DEPENDENT.getValue() : triggerType.getValue());
kwargs.put("triggerDescription", JacksonUtils.toJsonString(trigger.getTriggerDescription()));
kwargs.put("startDate", trigger.getStartDate() != null ? new Timestamp(trigger.getStartDate().getTime()) : null);
kwargs.put("endDate", trigger.getEndDate() != null ? new Timestamp(trigger.getEndDate().getTime()) : null);
kwargs.put("repeatCount", trigger.getRepeatCount());
jobDao.saveJobTriggerDetail(kwargs);
log.info("Add job '{}' ok.", jobKey);
if (dependencyType != null) {
switch (dependencyType) {
case SERIAL:
handleJobDependency(jobKey, jobId, jobDef.getDependentKeys(), DependencyType.SERIAL);
break;
case PARALLEL:
handleJobDependency(jobKey, jobId, jobDef.getForkKeys(), DependencyType.PARALLEL);
break;
case MIXED:
handleJobDependency(jobKey, jobId, jobDef.getDependentKeys(), DependencyType.SERIAL);
handleJobDependency(jobKey, jobId, jobDef.getForkKeys(), DependencyType.PARALLEL);
break;
}
}
lifeCycleListenerContainer.onChange(jobKey, JobLifeCycle.CREATION);
return jobId;
}
}
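/*
 * A minimal caller-side sketch of persistJob (hypothetical setup; how a
 * JobDefinition is actually built is outside this file):
 *
 *   JobDefinition jobDef = ...; // names, trigger, optional dependent/fork keys
 *   int jobId = jobManager.persistJob(jobDef, null);
 *
 * Re-invoking persistJob with the same JobKey merges the definition into the
 * existing rows instead of inserting new ones.
 */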
/**
 * Saves the job dependency info, replacing any rows previously stored for the
 * same dependency type.
 *
 * @param jobKey         the job that owns the dependencies
 * @param jobId          the id of that job
 * @param dependencies   the dependent (serial) or forked (parallel) job keys
 * @param dependencyType the type of dependency being stored
 * @throws Exception if a job id lookup or DAO call fails
 */
private void handleJobDependency(JobKey jobKey, int jobId, JobKey[] dependencies, DependencyType dependencyType) throws Exception {
List<Integer> dependentIds = new ArrayList<>();
if (ArrayUtils.isNotEmpty(dependencies)) {
for (JobKey dependency : dependencies) {
if (hasJob(dependency)) {
dependentIds.add(getJobId(dependency));
}
}
}
if (dependentIds.size() > 0) {
Map<String, Object> kwargs = new HashMap<>();
kwargs.put("jobId", jobId);
kwargs.put("dependencyType", dependencyType.getValue());
jobDao.deleteJobDependency(kwargs);
for (Integer dependentId : dependentIds) {
kwargs = new HashMap<>();
kwargs.put("jobId", jobId);
kwargs.put("dependentJobId", dependentId);
kwargs.put("dependencyType", dependencyType.getValue());
jobDao.saveJobDependency(kwargs);
}
log.info("Add job dependency by key '{}' ok.", jobKey);
}
}
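/*
 * Note that the delete-then-insert sequence above replaces all previously
 * stored rows for the same (jobId, dependencyType) pair whenever at least one
 * dependency resolves to an existing job, keeping the dependency table free of
 * stale links after a re-persist.
 */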
@Override
public JobState finishJob(JobKey jobKey) throws Exception {
if (!hasJob(jobKey)) {
throw new JobBeanNotFoundException(jobKey);
}
if (!hasJobState(jobKey, JobState.NOT_SCHEDULED)) {
throw new JobException("Please unschedule the job before finishing it.");
}
try {
lifeCycleListenerContainer.onChange(jobKey, JobLifeCycle.COMPLETION);
return setJobState(jobKey, JobState.FINISHED);
} finally {
jobIdCache.evict(jobKey);
}
}
@Override
public boolean hasJob(JobKey jobKey) {
Integer result = jobQueryDao.selectJobExists(jobKey.getClusterName(), jobKey.getGroupName(), jobKey.getJobName(),
jobKey.getJobClassName());
return result != null && result.intValue() > 0;
}
@Override
public JobState setJobState(JobKey jobKey, JobState jobState) throws Exception {
final int jobId = getJobId(jobKey);
Map<String, Object> kwargs = new HashMap<>();
kwargs.put("jobState", jobState.getValue());
kwargs.put("jobId", jobId);
jobDao.updateJobState(kwargs);
return getJobRuntimeDetail(jobKey).getJobState();
}
@Override
public boolean hasJobState(JobKey jobKey, JobState jobState) throws Exception {
JobRuntimeDetail jobRuntime = getJobRuntimeDetail(jobKey);
return jobRuntime.getJobState() == jobState;
}
@Override
public JobDetail getJobDetail(JobKey jobKey, boolean detailed) throws Exception {
JobDetail jobDetail = doGetJobDetail(jobKey);
if (detailed) {
jobDetail.setJobRuntime(getJobRuntimeDetail(jobKey));
jobDetail.setJobTriggerDetail(getJobTriggerDetail(jobKey));
}
return jobDetail;
}
private JobDetail doGetJobDetail(JobKey jobKey) throws SQLException {
Map<String, Object> data = jobQueryDao.selectJobDetail(jobKey.getClusterName(), jobKey.getGroupName(), jobKey.getJobName(),
jobKey.getJobClassName());
if (data == null) {
throw new JobBeanNotFoundException(jobKey);
}
JobDetail jobDetail = convertAsBean(data, JobDetail.class);
jobDetail.setJobKey(JobKey.of(data));
return jobDetail;
}
@Override
public JobTriggerDetail getJobTriggerDetail(JobKey jobKey) throws Exception {
final int jobId = getJobId(jobKey);
Map<String, Object> data = jobQueryDao.selectJobTriggerDetail(jobId);
if (data == null) {
throw new JobBeanNotFoundException(jobKey);
}
return convertAsBean(data, JobTriggerDetail.class);
}
@Override
public boolean hasRelations(JobKey jobKey, DependencyType dependencyType) throws Exception {
int jobId = getJobId(jobKey);
Integer rowCount = jobQueryDao.selectJobHasRelations(jobId, dependencyType.getValue());
return rowCount != null && rowCount.intValue() > 0;
}
@Override
public JobKey[] getRelations(JobKey jobKey, DependencyType dependencyType) throws Exception {
Set<JobKey> jobKeys = new TreeSet<>();
int jobId = getJobId(jobKey);
List