/*
* Copyright 2006-2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.batch.core.step.item;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import io.micrometer.core.instrument.Tag;
import io.micrometer.core.instrument.Timer;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.batch.core.StepContribution;
import org.springframework.batch.core.StepExecution;
import org.springframework.batch.core.listener.StepListenerFailedException;
import org.springframework.batch.core.metrics.BatchMetrics;
import org.springframework.batch.core.step.skip.LimitCheckingItemSkipPolicy;
import org.springframework.batch.core.step.skip.NonSkippableProcessException;
import org.springframework.batch.core.step.skip.SkipLimitExceededException;
import org.springframework.batch.core.step.skip.SkipListenerFailedException;
import org.springframework.batch.core.step.skip.SkipPolicy;
import org.springframework.batch.item.ItemProcessor;
import org.springframework.batch.item.ItemWriter;
import org.springframework.classify.BinaryExceptionClassifier;
import org.springframework.classify.Classifier;
import org.springframework.retry.ExhaustedRetryException;
import org.springframework.retry.RecoveryCallback;
import org.springframework.retry.RetryCallback;
import org.springframework.retry.RetryContext;
import org.springframework.retry.RetryException;
import org.springframework.retry.support.DefaultRetryState;
/**
 * Fault-tolerant implementation of the {@link ChunkProcessor} interface, that
 * allows for skipping or retrying of items that cause exceptions during
 * processing or writing.
 *
 */
public class FaultTolerantChunkProcessor extends SimpleChunkProcessor {
// Decides whether an exception thrown while processing an item may be skipped.
private SkipPolicy itemProcessSkipPolicy = new LimitCheckingItemSkipPolicy();
// Decides whether an exception thrown while writing an item may be skipped.
private SkipPolicy itemWriteSkipPolicy = new LimitCheckingItemSkipPolicy();
// Drives retry/recovery semantics around the process and write operations.
private final BatchRetryTemplate batchRetryTemplate;
// Classifies exceptions into rollback (true) / no-rollback (false); defaults to rollback for everything.
private Classifier rollbackClassifier = new BinaryExceptionClassifier(true);
private Log logger = LogFactory.getLog(getClass());
// When true (the default) inputs are buffered and come back as the same chunk after a rollback.
private boolean buffering = true;
// Optional key used to identify failed items across rollbacks; only relevant when buffering is false.
private KeyGenerator keyGenerator;
private ChunkMonitor chunkMonitor = new ChunkMonitor();
// When true (the default) the ItemProcessor is re-invoked on every retry of the chunk.
private boolean processorTransactional = true;
/**
 * The {@link KeyGenerator} to use to identify failed items across rollback.
 * Not used in the case of the {@link #setBuffering(boolean) buffering flag}
 * being true (the default), because buffered items are replayed as-is.
 *
 * @param keyGenerator the {@link KeyGenerator} to set
 */
public void setKeyGenerator(KeyGenerator keyGenerator) {
this.keyGenerator = keyGenerator;
}
/**
 * The policy that determines whether exceptions thrown during item
 * processing may be skipped.
 *
 * @param skipPolicy the {@link SkipPolicy} for item processing
 */
public void setProcessSkipPolicy(SkipPolicy skipPolicy) {
	this.itemProcessSkipPolicy = skipPolicy;
}
/**
 * The policy that determines whether exceptions thrown during item
 * writing may be skipped.
 *
 * @param skipPolicy the {@link SkipPolicy} for item writing
 */
public void setWriteSkipPolicy(SkipPolicy skipPolicy) {
	this.itemWriteSkipPolicy = skipPolicy;
}
/**
 * A classifier that can distinguish between exceptions that cause rollback
 * (return true) or not (return false). Defaults to a classifier that rolls
 * back on every exception.
 *
 * @param rollbackClassifier classifier
 */
public void setRollbackClassifier(Classifier rollbackClassifier) {
this.rollbackClassifier = rollbackClassifier;
}
/**
 * The {@link ChunkMonitor} used to track progress through a chunk.
 *
 * @param chunkMonitor monitor
 */
public void setChunkMonitor(ChunkMonitor chunkMonitor) {
this.chunkMonitor = chunkMonitor;
}
/**
 * A flag to indicate that items have been buffered and therefore will
 * always come back as a chunk after a rollback. Otherwise things are more
 * complicated because after a rollback the new chunk might or might not
 * contain items from the previous failed chunk.
 *
 * @param buffering true if items will be buffered
 */
public void setBuffering(boolean buffering) {
this.buffering = buffering;
}
/**
 * Flag to say that the {@link ItemProcessor} is transactional (defaults to
 * true). If false then the processor is only called once per item per
 * chunk, even if there are rollbacks with retries and skips; the outputs
 * are cached and replayed instead of reprocessed.
 *
 * @param processorTransactional the flag value to set
 */
public void setProcessorTransactional(boolean processorTransactional) {
this.processorTransactional = processorTransactional;
}
/**
 * Create a fault-tolerant chunk processor delegating to the given processor
 * and writer, with the supplied retry template driving retry and recovery.
 *
 * @param itemProcessor the processor applied to each input item
 * @param itemWriter the writer for the processed items
 * @param batchRetryTemplate the template used to retry process/write operations
 */
public FaultTolerantChunkProcessor(ItemProcessor itemProcessor,
ItemWriter itemWriter, BatchRetryTemplate batchRetryTemplate) {
super(itemProcessor, itemWriter);
this.batchRetryTemplate = batchRetryTemplate;
}
@Override
protected void initializeUserData(Chunk inputs) {
	@SuppressWarnings("unchecked")
	UserData data = (UserData) inputs.getUserData();
	if (data != null) {
		// BATCH-2663: reset the filter count when re-scanning a failed chunk
		if (data.scanning()) {
			data.filterCount = 0;
		}
		return;
	}
	// First time through: attach fresh user data with an empty output buffer.
	UserData fresh = new UserData<>();
	fresh.setOutputs(new Chunk<>());
	inputs.setUserData(fresh);
}
@Override
@SuppressWarnings("unchecked")
protected int getFilterCount(Chunk inputs, Chunk outputs) {
	// The filter count is tracked on the user data attached to the inputs.
	UserData state = (UserData) inputs.getUserData();
	return state.filterCount;
}
@Override
@SuppressWarnings("unchecked")
protected boolean isComplete(Chunk inputs) {
	/*
	 * Write skips must be remembered across transactions or they keep
	 * reappearing. Skips registered against the inputs are not reprocessed,
	 * but output skips still need to reach the listeners later. Since the
	 * inputs are shared by every transaction working on the same chunk
	 * (and the outputs are not), the output skips live in the user data
	 * stashed on the inputs.
	 */
	UserData state = (UserData) inputs.getUserData();
	Chunk buffered = state.getOutputs();
	return inputs.isEmpty() && buffered.getSkips().isEmpty();
}
@Override
@SuppressWarnings("unchecked")
protected Chunk getAdjustedOutputs(Chunk inputs, Chunk outputs) {
	UserData state = (UserData) inputs.getUserData();
	Chunk carriedOver = state.getOutputs();
	// Merge the fresh items with any skips accumulated in earlier transactions.
	Chunk merged = new Chunk<>(outputs.getItems(), carriedOver.getSkips());
	merged.setBusy(carriedOver.isBusy());
	// Stash the merged chunk so accumulating skips survive into the next transaction.
	state.setOutputs(merged);
	return merged;
}
/**
 * Process each input item through the (retryable) {@link ItemProcessor},
 * collecting the non-null results into the output chunk. Filtered items
 * (processor returned null) are removed from the inputs and counted;
 * failures are either rethrown (rollback), skipped, or recovered according
 * to the skip policy and rollback classifier. When the chunk is being
 * scanned after a failure, only the first item is processed per call and
 * any cached outputs are replayed.
 */
@Override
protected Chunk transform(final StepContribution contribution, Chunk inputs) throws Exception {
Chunk outputs = new Chunk<>();
@SuppressWarnings("unchecked")
final UserData data = (UserData) inputs.getUserData();
// Outputs cached from a previous pass over this chunk (non-transactional processor).
final Chunk cache = data.getOutputs();
// Iterate a snapshot of the cache in lock-step with the inputs, so each
// input item can be matched with its previously computed output.
final Iterator cacheIterator = cache.isEmpty() ? null : new ArrayList<>(cache.getItems()).iterator();
final AtomicInteger count = new AtomicInteger(0);
for (final Chunk.ChunkIterator iterator = inputs.iterator(); iterator.hasNext();) {
final I item = iterator.next();
RetryCallback retryCallback = new RetryCallback() {
@Override
public O doWithRetry(RetryContext context) throws Exception {
// Time each item-processing attempt and record success/failure.
Timer.Sample sample = BatchMetrics.createTimerSample();
String status = BatchMetrics.STATUS_SUCCESS;
O output = null;
try {
count.incrementAndGet();
O cached = (cacheIterator != null && cacheIterator.hasNext()) ? cacheIterator.next() : null;
if (cached != null && !processorTransactional) {
// Non-transactional processor: reuse the cached output instead of reprocessing.
output = cached;
}
else {
output = doProcess(item);
if (output == null) {
// A null result means the processor filtered the item out.
data.incrementFilterCount();
} else if (!processorTransactional && !data.scanning()) {
// Cache the output so a retry does not re-invoke the processor.
cache.add(output);
}
}
}
catch (Exception e) {
status = BatchMetrics.STATUS_FAILURE;
if (rollbackClassifier.classify(e)) {
// Default is to rollback unless the classifier
// allows us to continue
throw e;
}
else if (shouldSkip(itemProcessSkipPolicy, e, contribution.getStepSkipCount())) {
// If we are not re-throwing then we should check if
// this is skippable
contribution.incrementProcessSkipCount();
logger.debug("Skipping after failed process with no rollback", e);
// If not re-throwing then the listener will not be
// called in next chunk.
callProcessSkipListener(item, e);
}
else {
// If it's not skippable that's an error in
// configuration - it doesn't make sense to not roll
// back if we are also not allowed to skip
throw new NonSkippableProcessException(
"Non-skippable exception in processor. Make sure any exceptions that do not cause a rollback are skippable.",
e);
}
}
finally {
stopTimer(sample, contribution.getStepExecution(), "item.process", status, "Item processing");
}
if (output == null) {
// No need to re-process filtered items
iterator.remove();
}
return output;
}
};
// Invoked when retries are exhausted: skip the item if the policy allows,
// otherwise escalate as a RetryException.
RecoveryCallback recoveryCallback = new RecoveryCallback() {
@Override
public O recover(RetryContext context) throws Exception {
Throwable e = context.getLastThrowable();
if (shouldSkip(itemProcessSkipPolicy, e, contribution.getStepSkipCount())) {
// Register the failure with the inputs so the item is not reprocessed.
iterator.remove(e);
contribution.incrementProcessSkipCount();
logger.debug("Skipping after failed process", e);
return null;
}
else {
if (rollbackClassifier.classify(e)) {
// Default is to rollback unless the classifier
// allows us to continue
throw new RetryException("Non-skippable exception in recoverer while processing", e);
}
iterator.remove(e);
return null;
}
}
};
// Stateful retry keyed on the item (or a key derived from it), so the same
// failure is recognized across rollbacks.
O output = batchRetryTemplate.execute(retryCallback, recoveryCallback, new DefaultRetryState(
getInputKey(item), rollbackClassifier));
if (output != null) {
outputs.add(output);
}
/*
 * We only want to process the first item if there is a scan for a
 * failed item.
 */
if (data.scanning()) {
// Replay the remaining cached outputs without reprocessing their inputs.
while (cacheIterator != null && cacheIterator.hasNext()) {
outputs.add(cacheIterator.next());
}
// Only process the first item if scanning
break;
}
}
return outputs;
}
@Override
protected void write(final StepContribution contribution, final Chunk inputs, final Chunk outputs)
throws Exception {
@SuppressWarnings("unchecked")
final UserData data = (UserData) inputs.getUserData();
final AtomicReference contextHolder = new AtomicReference<>();
RetryCallback