package com.cory.service;
import com.alibaba.fastjson.JSON;
import com.cory.dao.ClusterJobDao;
import com.cory.model.Cluster;
import com.cory.model.ClusterJob;
import com.cory.page.Pagination;
import com.cory.util.IpUtil;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections4.CollectionUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Scope;
import org.springframework.context.annotation.ScopedProxyMode;
import org.springframework.stereotype.Service;
import javax.annotation.PostConstruct;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.function.Consumer;
/**
* generated by CodeGenerator on 2017/5/10.
*/
@Slf4j
@Service
@Scope(proxyMode = ScopedProxyMode.TARGET_CLASS)
public class ClusterJobService extends BaseService {
@Autowired
private ClusterJobDao clusterJobDao;
@Autowired
private ClusterService clusterService;
@Value("${server.port}")
private Integer port;
private static final ScheduledExecutorService TIMER = new ScheduledThreadPoolExecutor(1);
private static final Map<String, Consumer<String>> JOB_HANDLER = new HashMap<>();
private static boolean INITIALIZED = false;
/**
* Initialization method; do not call it externally.
*/
@PostConstruct
public void init() {
if (INITIALIZED) {
return;
}
INITIALIZED = true;
TIMER.scheduleWithFixedDelay(() -> executeJob(), 30, 10, TimeUnit.SECONDS);
}
private void executeJob() {
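//page through the jobs queued for this node (matched by ip:port) and execute each one;
//executed jobs are deleted in doExecuteJob, so the backlog shrinks as the loop proceeds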
log.debug("execute cluster job start");
int count = 0;
int pageNo = 1;
ClusterJob filter = ClusterJob.builder().ip(buildIpPort()).build();
Pagination<ClusterJob> pagination = this.list(pageNo, Integer.MAX_VALUE, filter, null);
while (null != pagination && CollectionUtils.isNotEmpty(pagination.getList())) {
pagination.getList().forEach(this::doExecuteJob);
count += pagination.getList().size();
pageNo++;
//keep the same filter on subsequent pages so we only fetch this node's jobs
pagination = this.list(pageNo, Integer.MAX_VALUE, filter, null);
}
log.debug("execute cluster job finish, job count: {}", count);
}
private void doExecuteJob(ClusterJob job) {
if (null == job) {
return;
}
//execute then delete; delete even on failure, since retrying would most likely keep failing
try {
Consumer<String> handler = JOB_HANDLER.get(job.getCode());
if (null == handler) {
log.warn("handler is null for cluster job: {}", job.getCode());
return;
}
handler.accept(job.getParam());
} catch (Throwable t) {
log.error("execute cluster job fail, job: {}", JSON.toJSONString(job), t);
} finally {
clusterJobDao.deleteById(job.getId());
}
}
@Override
public ClusterJobDao getDao() {
return clusterJobDao;
}
/**
* Add a job for every machine in the cluster. Each machine deletes the job automatically after executing it.
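* <p>Example (assumes a handler for the hypothetical job code "refreshConfig" was
* registered beforehand via {@link #registerJobHandler(String, Consumer)}):
* <pre>{@code
* clusterJobService.addJob("refreshConfig", "refresh config", "all");
* }</pre>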
* @param jobCode job code, matching a handler registered via {@link #registerJobHandler(String, Consumer)}
* @param jobName human-readable job name
* @param param parameter string passed to the handler
*/
public void addJob(String jobCode, String jobName, String param) {
Pagination<Cluster> pagination = clusterService.list(1, Integer.MAX_VALUE, null, null);
if (null == pagination || CollectionUtils.isEmpty(pagination.getList())) {
return;
}
pagination.getList().forEach(cluster -> clusterJobDao.add(ClusterJob.builder()
.ip(cluster.getIp())
.code(jobCode)
.name(jobName)
.param(param)
.build()));
}
/**
* Register a job handler (one-time registration). After registering, use {@link ClusterJobService#addJob(String, String, String)} to add jobs.
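* <p>Example (the job code "refreshConfig" and the handler body below are
* illustrative only, not part of this API):
* <pre>{@code
* // register once, e.g. at startup of the owning component
* clusterJobService.registerJobHandler("refreshConfig",
*         param -> log.info("refresh config on this node, param: {}", param));
* // then fan the job out to every node in the cluster
* clusterJobService.addJob("refreshConfig", "refresh config", "all");
* }</pre>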
* @param jobCode job code identifying the handler
* @param jobHandler handler invoked with the job's param on each node
*/
public void registerJobHandler(String jobCode, Consumer<String> jobHandler) {
JOB_HANDLER.put(jobCode, jobHandler);
}
private String buildIpPort() {
return IpUtil.getHostIp() + ":" + port;
}
}