
tech.powerjob.worker.persistence.db.TaskDAOImpl Maven / Gradle / Ivy
package tech.powerjob.worker.persistence.db;
import tech.powerjob.worker.common.constants.TaskStatus;
import tech.powerjob.worker.core.processor.TaskResult;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import tech.powerjob.worker.persistence.TaskDO;
import java.sql.*;
import java.util.Collection;
import java.util.List;
import java.util.Map;
/**
 * Task persistence implementation layer; backing table: task_info
 *
 * @author tjq
 * @since 2020/3/17
 */
public class TaskDAOImpl implements TaskDAO {
// Whether to create the idx_status index on task_info during initTable().
private final boolean useIndex;
// Supplies JDBC connections for all DAO operations; lifecycle owned by the caller.
private final ConnectionFactory connectionFactory;
/**
 * Creates a DAO without the status index.
 *
 * @param connectionFactory source of JDBC connections
 */
public TaskDAOImpl(ConnectionFactory connectionFactory) {
this(false, connectionFactory);
}
/**
 * Creates a DAO, optionally building an index on task_info(status).
 *
 * @param useIndex          true to create idx_status in initTable()
 * @param connectionFactory source of JDBC connections
 */
public TaskDAOImpl(boolean useIndex, ConnectionFactory connectionFactory) {
this.useIndex = useIndex;
this.connectionFactory = connectionFactory;
}
@Override
public void initTable() throws Exception {
    // Drop-and-recreate: worker-local task storage starts from a clean slate.
    String dropTableSql = "drop table if exists task_info";
    // Thanks to Gitee user @Linfly for reporting the BUG:
    // bigint matches the value range of Java's Long exactly.
    String createTableSql = "create table task_info (task_id varchar(255), instance_id bigint, sub_instance_id bigint, task_name varchar(255), task_content blob, address varchar(255), status int, result text, failed_cnt int, created_time bigint, last_modified_time bigint, last_report_time bigint, constraint pkey unique (instance_id, task_id))";
    String createIndexSql = "create INDEX idx_status ON task_info (status)";
    try (Connection connection = connectionFactory.getConnection();
         Statement statement = connection.createStatement()) {
        statement.execute(dropTableSql);
        statement.execute(createTableSql);
        // Optional index on status to speed up status-based lookups.
        if (useIndex) {
            statement.execute(createIndexSql);
        }
    }
}
@Override
public boolean save(TaskDO task) throws SQLException {
    // Single-row insert; success means exactly one row was written.
    String sql = "insert into task_info(task_id, instance_id, sub_instance_id, task_name, task_content, address, status, result, failed_cnt, created_time, last_modified_time, last_report_time) values (?,?,?,?,?,?,?,?,?,?,?,?)";
    try (Connection connection = connectionFactory.getConnection();
         PreparedStatement statement = connection.prepareStatement(sql)) {
        fillInsertPreparedStatement(task, statement);
        int affected = statement.executeUpdate();
        return affected == 1;
    }
}
/**
 * Inserts all given tasks in a single JDBC batch inside one transaction.
 * Rolls back the whole batch if any insert fails, then rethrows.
 *
 * @param tasks tasks to persist
 * @return true when the batch executed (all-or-nothing)
 * @throws SQLException on any database failure (after rollback)
 */
@Override
public boolean batchSave(Collection<TaskDO> tasks) throws SQLException {
    String insertSql = "insert into task_info(task_id, instance_id, sub_instance_id, task_name, task_content, address, status, result, failed_cnt, created_time, last_modified_time, last_report_time) values (?,?,?,?,?,?,?,?,?,?,?,?)";
    try (Connection conn = connectionFactory.getConnection()) {
        boolean originAutoCommitFlag = conn.getAutoCommit();
        conn.setAutoCommit(false);
        try (PreparedStatement ps = conn.prepareStatement(insertSql)) {
            for (TaskDO task : tasks) {
                fillInsertPreparedStatement(task, ps);
                ps.addBatch();
            }
            ps.executeBatch();
            // FIX: commit explicitly. The original relied on the implicit commit
            // triggered by setAutoCommit(true) in the finally block; if the
            // connection's original auto-commit mode was false, the batch was
            // silently never committed.
            conn.commit();
            return true;
        } catch (Throwable e) {
            // Undo any partially-applied batch before propagating.
            conn.rollback();
            throw e;
        } finally {
            // Restore the connection's original mode for subsequent users.
            conn.setAutoCommit(originAutoCommitFlag);
        }
    }
}
@Override
public boolean simpleDelete(SimpleTaskQuery condition) throws SQLException {
    // NOTE(review): the condition is concatenated into the SQL text, so it must
    // come from trusted internal callers, never from external input.
    String sql = String.format("delete from task_info where %s", condition.getQueryCondition());
    try (Connection connection = connectionFactory.getConnection();
         Statement statement = connection.createStatement()) {
        statement.executeUpdate(sql);
        return true;
    }
}
/**
 * Runs "select * from task_info where &lt;condition&gt;" and maps each row
 * through {@code convert}.
 *
 * @param query holds the WHERE-clause text and a read-only hint
 * @return all matching tasks (empty list when none match)
 * @throws SQLException on any database failure
 */
@Override
public List<TaskDO> simpleQuery(SimpleTaskQuery query) throws SQLException {
    // NOTE(review): the condition is concatenated into the SQL text, so it must
    // come from trusted internal callers, never from external input.
    String sql = "select * from task_info where " + query.getQueryCondition();
    List<TaskDO> result = Lists.newLinkedList();
    try (Connection conn = connectionFactory.getConnection();
         PreparedStatement ps = conn.prepareStatement(sql)) {
        if (query.isReadOnly()) {
            // Hint the driver that this transaction only reads.
            conn.setReadOnly(true);
        }
        // FIX: try-with-resources replaces the manual finally/close that
        // swallowed close() failures and leaked on early exceptions.
        try (ResultSet rs = ps.executeQuery()) {
            while (rs.next()) {
                result.add(convert(rs));
            }
        }
    }
    return result;
}
@Override
public List
© 2015 - 2025 Weber Informatics LLC | Privacy Policy