org.jumpmind.symmetric.service.impl.DataExtractorService Maven / Gradle / Ivy
Go to download
Show more of this group Show more artifacts with this name
Show all versions of symmetric-ds Show documentation
Show all versions of symmetric-ds Show documentation
SymmetricDS is an open source database synchronization solution. It is platform-independent,
web-enabled, and database-agnostic. SymmetricDS was first built to replicate changes between 'retail store'
databases and a centralized 'corporate' database.
The newest version!
/*
* Licensed to JumpMind Inc under one or more contributor
* license agreements. See the NOTICE file distributed
* with this work for additional information regarding
* copyright ownership. JumpMind Inc licenses this file
* to you under the GNU Lesser General Public License (the
* "License"); you may not use this file except in compliance
* with the License.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, see
* <http://www.gnu.org/licenses/>.
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.jumpmind.symmetric.service.impl;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.IOException;
import java.io.OutputStream;
import java.io.Writer;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.StringUtils;
import org.jumpmind.symmetric.Version;
import org.jumpmind.symmetric.common.Constants;
import org.jumpmind.symmetric.common.ParameterConstants;
import org.jumpmind.symmetric.common.TableConstants;
import org.jumpmind.symmetric.common.csv.CsvConstants;
import org.jumpmind.symmetric.ddl.model.Table;
import org.jumpmind.symmetric.extract.DataExtractorContext;
import org.jumpmind.symmetric.extract.IDataExtractor;
import org.jumpmind.symmetric.extract.IExtractorFilter;
import org.jumpmind.symmetric.io.ThresholdFileWriter;
import org.jumpmind.symmetric.model.BatchInfo;
import org.jumpmind.symmetric.model.ChannelMap;
import org.jumpmind.symmetric.model.Data;
import org.jumpmind.symmetric.model.DataEventType;
import org.jumpmind.symmetric.model.DataMetaData;
import org.jumpmind.symmetric.model.Node;
import org.jumpmind.symmetric.model.NodeChannel;
import org.jumpmind.symmetric.model.OutgoingBatch;
import org.jumpmind.symmetric.model.OutgoingBatches;
import org.jumpmind.symmetric.model.TriggerHistory;
import org.jumpmind.symmetric.model.TriggerRouter;
import org.jumpmind.symmetric.model.OutgoingBatch.Status;
import org.jumpmind.symmetric.route.SimpleRouterContext;
import org.jumpmind.symmetric.service.IAcknowledgeService;
import org.jumpmind.symmetric.service.IConfigurationService;
import org.jumpmind.symmetric.service.IDataExtractorService;
import org.jumpmind.symmetric.service.IDataService;
import org.jumpmind.symmetric.service.IExtractListener;
import org.jumpmind.symmetric.service.INodeService;
import org.jumpmind.symmetric.service.IOutgoingBatchService;
import org.jumpmind.symmetric.service.IRouterService;
import org.jumpmind.symmetric.service.ITriggerRouterService;
import org.jumpmind.symmetric.statistic.DataExtractorStatisticsWriter;
import org.jumpmind.symmetric.statistic.IStatisticManager;
import org.jumpmind.symmetric.statistic.StatisticConstants;
import org.jumpmind.symmetric.transport.IOutgoingTransport;
import org.jumpmind.symmetric.transport.TransportUtils;
import org.jumpmind.symmetric.upgrade.UpgradeConstants;
import org.jumpmind.symmetric.util.CsvUtils;
import org.springframework.beans.BeansException;
import org.springframework.beans.factory.BeanFactory;
import org.springframework.beans.factory.BeanFactoryAware;
import org.springframework.dao.DataAccessException;
import org.springframework.jdbc.core.ConnectionCallback;
import org.springframework.jdbc.support.JdbcUtils;
/**
 * Service responsible for extracting outgoing data — node configuration,
 * initial loads, and batched change data — and writing it to a transport
 * {@link java.io.Writer} for delivery to other nodes.
 */
public class DataExtractorService extends AbstractService implements IDataExtractorService, BeanFactoryAware {
// Collaborating services, injected by Spring (see setters elsewhere in this class).
private IOutgoingBatchService outgoingBatchService;
private IRouterService routingService;
private IDataService dataService;
private IConfigurationService configurationService;
private IAcknowledgeService acknowledgeService;
private ITriggerRouterService triggerRouterService;
private INodeService nodeService;
// Used to look up version-specific IDataExtractor beans in getDataExtractor().
private BeanFactory beanFactory;
// Prototype context that is copy()'d per extraction run.
private DataExtractorContext clonableContext;
// Typed element parameter restored: the raw List here was almost certainly a
// generic type lost when this source was scraped (the rest of the file shows
// the same stripping, e.g. "List triggerRouters").
private List<IExtractorFilter> extractorFilters;
private IStatisticManager statisticManager;
/**
* @see DataExtractorService#extractConfigurationStandalone(Node,
* Writer)
*/
/**
 * Convenience overload: wraps the raw output stream in a {@link Writer}
 * and delegates to {@link #extractConfigurationStandalone(Node, Writer)}.
 *
 * @param node the node to extract configuration for
 * @param out  the transport output stream to write to
 * @throws IOException if writing to the stream fails
 */
public void extractConfigurationStandalone(Node node, OutputStream out) throws IOException {
    Writer writer = TransportUtils.toWriter(out);
    extractConfigurationStandalone(node, writer);
}
/**
 * Extract the SymmetricDS configuration for the passed in {@link Node}.
 * Note that this method will insert an already acknowledged batch to
 * indicate that the configuration was sent. If the configuration fails to
 * load for some reason on the client the batch status will NOT reflect the
 * failure.
 */
public void extractConfigurationStandalone(Node node, Writer writer) throws IOException {
    try {
        OutgoingBatch batch = new OutgoingBatch(node.getNodeId(), Constants.CHANNEL_CONFIG, Status.NE);
        boolean preNewRegistrationProtocol = Version.isOlderThanVersion(node.getSymmetricVersion(),
                UpgradeConstants.VERSION_FOR_NEW_REGISTRATION_PROTOCOL);
        if (preNewRegistrationProtocol) {
            // Old clients never send an acknowledgment for the registration
            // batch, so persist it and ack it on their behalf right away.
            outgoingBatchService.insertOutgoingBatch(batch);
            acknowledgeService.ack(batch.getBatchInfo());
        } else {
            // Newer clients use a virtual batch id for registration.
            batch.setBatchId(BatchInfo.VIRTUAL_BATCH_FOR_REGISTRATION);
        }
        IDataExtractor extractor = getDataExtractor(node.getSymmetricVersion());
        DataExtractorContext context = clonableContext.copy(extractor);
        extractor.init(writer, context);
        extractor.begin(batch, writer);
        extractConfiguration(node, writer, context);
        extractor.commit(batch, writer);
    } finally {
        // Always push buffered output to the transport, even on failure.
        writer.flush();
    }
}
/**
 * Writes the registration/configuration payload for the given node: purge
 * SQL for the configuration tables (for 1.5+ clients), followed by an
 * initial load of each registration trigger/router's table. The node's own
 * identity row is emitted as a synthetic INSERT rather than a table load.
 *
 * Fix: the declaration was a raw {@code List}; with a raw list,
 * {@code triggerRouters.get(i)} yields {@code Object} and the assignments
 * below would not compile. The generic parameter (stripped when this source
 * was scraped to HTML) is restored.
 *
 * @param node   target node (its version selects the extractor dialect)
 * @param writer destination for CSV/SQL output
 * @param ctx    optional extractor context; when null a fresh extractor is
 *               resolved from the node's version
 * @throws IOException if writing fails
 */
public void extractConfiguration(Node node, Writer writer, DataExtractorContext ctx) throws IOException {
    List<TriggerRouter> triggerRouters = triggerRouterService.getTriggerRoutersForRegistration(
            StringUtils.isBlank(node.getSymmetricVersion()) ? Version.version() : node.getSymmetricVersion(),
            parameterService.getNodeGroupId(), node.getNodeGroupId());
    if (node.isVersionGreaterThanOrEqualTo(1, 5, 0)) {
        // Purge in reverse registration order so dependent tables are
        // cleared before the tables they reference.
        for (int i = triggerRouters.size() - 1; i >= 0; i--) {
            TriggerRouter triggerRouter = triggerRouters.get(i);
            StringBuilder sql = new StringBuilder(dbDialect.createPurgeSqlFor(node, triggerRouter));
            addPurgeCriteriaToConfigurationTables(triggerRouter.getTrigger().getSourceTableName(), sql);
            CsvUtils.writeSql(sql.toString(), writer);
        }
    }
    for (int i = 0; i < triggerRouters.size(); i++) {
        TriggerRouter triggerRouter = triggerRouters.get(i);
        final IDataExtractor dataExtractor = ctx != null ? ctx.getDataExtractor() : getDataExtractor(node
                .getSymmetricVersion());
        TriggerHistory triggerHistory = new TriggerHistory(dbDialect.getTable(triggerRouter.getTrigger(),
                false), triggerRouter.getTrigger());
        // Synthetic, descending history ids keep these virtual histories
        // clear of real trigger-history ids.
        triggerHistory.setTriggerHistoryId(Integer.MAX_VALUE - i);
        if (!triggerRouter.getTrigger().getSourceTableName().endsWith(TableConstants.SYM_NODE_IDENTITY)) {
            writeInitialLoad(node, triggerRouter, triggerHistory, writer, ctx);
        } else {
            // The identity table gets a single INSERT of the target node's
            // own id instead of a bulk initial load.
            Data data = new Data(1, null, node.getNodeId(), DataEventType.INSERT, triggerRouter.getTrigger()
                    .getSourceTableName(), null, triggerHistory, triggerRouter.getTrigger().getChannelId(), null,
                    null);
            dataExtractor.write(writer, data, triggerRouter.getRouter().getRouterId(), ctx);
        }
    }
    if (triggerRouters.size() == 0) {
        log.error("RegistrationEmpty", node);
    }
}
/**
 * For the SYM_NODE and SYM_NODE_SECURITY tables, narrows the purge
 * statement to rows created by this node's own identity, so a purge on the
 * client does not wipe rows owned by other creators.
 *
 * Fix: the node id was interpolated into the SQL literal verbatim; an id
 * containing a single quote would corrupt (or inject into) the statement.
 * Quotes are now doubled per standard SQL escaping. Behavior is unchanged
 * for ordinary ids.
 *
 * @param sourceTableName table the purge SQL targets
 * @param sql             purge statement being built; appended in place
 */
private void addPurgeCriteriaToConfigurationTables(String sourceTableName, StringBuilder sql) {
    if ((TableConstants.getTableName(dbDialect.getTablePrefix(), TableConstants.SYM_NODE)
            .equalsIgnoreCase(sourceTableName))
            || TableConstants.getTableName(dbDialect.getTablePrefix(), TableConstants.SYM_NODE_SECURITY)
                    .equalsIgnoreCase(sourceTableName)) {
        Node me = nodeService.findIdentity();
        if (me != null) {
            // Escape embedded quotes; this SQL is shipped as text, so a
            // PreparedStatement placeholder is not an option here.
            String escapedNodeId = me.getNodeId().replace("'", "''");
            sql.append(String.format(" where created_at_node_id='%s'", escapedNodeId));
        }
    }
}
/**
 * Resolves the {@link IDataExtractor} Spring bean appropriate for the
 * remote node's SymmetricDS version, falling back to the current default
 * extractor when the version is null or not a 1.x release.
 *
 * @param version remote node's version string, may be null
 * @return the version-matched extractor bean
 */
private IDataExtractor getDataExtractor(String version) {
    String beanName = Constants.DATA_EXTRACTOR;
    if (version != null) {
        int[] parsed = Version.parseVersion(version);
        if (parsed[0] == 1) {
            int minor = parsed[1];
            // Version 1.4.1-appaji accepts the "old" token, so treat it
            // like a 1.5 version (skip the "14" extractor).
            boolean appajiBuild = "1.4.1-appaji".equals(version);
            if (minor <= 2) {
                beanName += "10";
            } else if (minor == 3) {
                beanName += "13";
            } else if (minor == 4 && !appajiBuild) {
                beanName += "14";
            } else if (minor <= 7) {
                beanName += "16";
            }
        }
    }
    return (IDataExtractor) beanFactory.getBean(beanName);
}
/**
 * Emits the initial-load rows for a single trigger/router into a batch that
 * the caller has already opened; a thin delegate to
 * {@link #writeInitialLoad}.
 *
 * @param node           node the data is destined for
 * @param trigger        trigger/router pair selecting the table to load
 * @param writer         destination the caller is already writing a batch to
 * @param ctx            extractor context of the enclosing batch
 * @param triggerHistory trigger history to stamp rows with; may be null,
 *                       in which case the newest history is looked up
 */
public void extractInitialLoadWithinBatchFor(Node node, final TriggerRouter trigger, Writer writer,
        DataExtractorContext ctx, TriggerHistory triggerHistory) {
    this.writeInitialLoad(node, trigger, triggerHistory, writer, ctx);
}
/**
* @param batch
* If null, then assume this 'initial load' is part of another
* batch.
*/
protected void writeInitialLoad(final Node node, final TriggerRouter triggerRouter, TriggerHistory triggerHistory,
final Writer writer, final DataExtractorContext ctx) {
triggerHistory = triggerHistory != null ? triggerHistory : triggerRouterService.getNewestTriggerHistoryForTrigger(triggerRouter.getTrigger()
.getTriggerId());
final boolean newExtractorCreated = ctx == null || ctx.getDataExtractor() == null;
final IDataExtractor dataExtractor = !newExtractorCreated ? ctx.getDataExtractor() : getDataExtractor(node
.getSymmetricVersion());
// The table to use for the SQL may be different than the configured table if there is a
// legacy table that is swapped out by the dataExtractor.
Table tableForSql = dbDialect.getTable(triggerRouter.getTrigger().getSourceCatalogName(), triggerRouter.getTrigger().getSourceSchemaName(),
dataExtractor.getLegacyTableName(triggerRouter.getTrigger().getSourceTableName()), true);
final String sql = dbDialect.createInitialLoadSqlFor(node, triggerRouter, tableForSql, triggerHistory);
log.debug("Sql",sql);
if (!tableForSql.getName().equals(triggerHistory.getSourceTableName())) {
// This is to make legacy tables backwards compatible
String tableName = triggerHistory.getSourceTableName();
triggerHistory = new TriggerHistory(tableForSql, triggerRouter.getTrigger());
triggerHistory.setSourceTableName(tableName);
}
final TriggerHistory triggerHistory2Use = triggerHistory;
jdbcTemplate.execute(new ConnectionCallback