
// Extracted source: org.jooq.impl.LoaderImpl (artifact-browser breadcrumb "Maven / Gradle / Ivy" converted to comment)
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Other licenses:
* -----------------------------------------------------------------------------
* Commercial licenses for this work are available. These replace the above
* ASL 2.0 and offer limited warranties, support, maintenance, and commercial
* database integrations.
*
* For more information, please visit: http://www.jooq.org/licenses
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*/
package org.jooq.impl;
import static java.lang.Boolean.FALSE;
// ...
import static org.jooq.SQLDialect.MARIADB;
// ...
import static org.jooq.SQLDialect.MYSQL;
import static org.jooq.impl.Tools.EMPTY_FIELD;
import static org.jooq.impl.Tools.combine;
import static org.jooq.tools.jdbc.JDBCUtils.safeClose;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.Reader;
import java.nio.charset.Charset;
import java.nio.charset.CharsetDecoder;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Base64;
import java.util.BitSet;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Stream;
import org.jooq.BatchBindStep;
import org.jooq.Configuration;
import org.jooq.DSLContext;
import org.jooq.ExecuteContext;
import org.jooq.ExecuteListener;
import org.jooq.Field;
import org.jooq.InsertQuery;
import org.jooq.Loader;
import org.jooq.LoaderCSVOptionsStep;
import org.jooq.LoaderCSVStep;
import org.jooq.LoaderContext;
import org.jooq.LoaderError;
import org.jooq.LoaderFieldMapper;
import org.jooq.LoaderFieldMapper.LoaderFieldContext;
import org.jooq.LoaderJSONOptionsStep;
import org.jooq.LoaderJSONStep;
import org.jooq.LoaderOptionsStep;
import org.jooq.LoaderRowListener;
import org.jooq.LoaderRowsStep;
import org.jooq.LoaderXMLStep;
import org.jooq.Record;
import org.jooq.Result;
import org.jooq.SQLDialect;
import org.jooq.Source;
import org.jooq.Table;
import org.jooq.exception.DataAccessException;
import org.jooq.exception.LoaderConfigurationException;
import org.jooq.tools.JooqLogger;
import org.jooq.tools.StringUtils;
import org.jooq.tools.csv.CSVParser;
import org.jooq.tools.csv.CSVReader;
import org.jooq.tools.jdbc.DefaultPreparedStatement;
import org.xml.sax.InputSource;
/**
* @author Lukas Eder
* @author Johannes Bühler
*/
final class LoaderImpl implements
// Cascading interface implementations for Loader behaviour
LoaderOptionsStep,
LoaderRowsStep,
LoaderXMLStep,
LoaderCSVStep,
LoaderCSVOptionsStep,
LoaderJSONStep,
LoaderJSONOptionsStep,
Loader {
private static final JooqLogger log = JooqLogger.getLogger(LoaderImpl.class);
private static final Set NO_SUPPORT_ROWCOUNT_ON_DUPLICATE = SQLDialect.supportedBy(MARIADB, MYSQL);
// Configuration constants
// -----------------------
private static final int ON_DUPLICATE_KEY_ERROR = 0;
private static final int ON_DUPLICATE_KEY_IGNORE = 1;
private static final int ON_DUPLICATE_KEY_UPDATE = 2;
private static final int ON_ERROR_ABORT = 0;
private static final int ON_ERROR_IGNORE = 1;
private static final int COMMIT_NONE = 0;
private static final int COMMIT_AFTER = 1;
private static final int COMMIT_ALL = 2;
private static final int BATCH_NONE = 0;
private static final int BATCH_AFTER = 1;
private static final int BATCH_ALL = 2;
private static final int BULK_NONE = 0;
private static final int BULK_AFTER = 1;
private static final int BULK_ALL = 2;
private static final int CONTENT_CSV = 0;
private static final int CONTENT_XML = 1;
private static final int CONTENT_JSON = 2;
private static final int CONTENT_ARRAYS = 3;
// Configuration data
// ------------------
private final Configuration configuration;
private final Table table;
private int onDuplicate = ON_DUPLICATE_KEY_ERROR;
private int onError = ON_ERROR_ABORT;
private int commit = COMMIT_NONE;
private int commitAfter = 1;
private int batch = BATCH_NONE;
private int batchAfter = 1;
private int bulk = BULK_NONE;
private int bulkAfter = 1;
private int content = CONTENT_CSV;
private Source input;
private Iterator extends Object[]> arrays;
// CSV configuration data
// ----------------------
private int ignoreRows = 1;
private char quote = CSVParser.DEFAULT_QUOTE_CHARACTER;
private char separator = CSVParser.DEFAULT_SEPARATOR;
private String nullString = null;
private Field>[] source;
private Field>[] fields;
private LoaderFieldMapper fieldMapper;
private boolean fieldsCorresponding;
private BitSet primaryKey;
// Result data
// -----------
private LoaderRowListener onRowStart;
private LoaderRowListener onRowEnd;
private final LoaderContext rowCtx = new DefaultLoaderContext();
private int ignored;
private int processed;
private int stored;
private int executed;
private int unexecuted;
private int uncommitted;
private final List errors;
LoaderImpl(Configuration configuration, Table table) {
this.configuration = configuration;
this.table = table;
this.errors = new ArrayList<>();
}
// -------------------------------------------------------------------------
// Configuration setup
// -------------------------------------------------------------------------

/** Select the ON_DUPLICATE_KEY_ERROR policy: duplicate keys cause errors (default). */
@Override
public final LoaderImpl onDuplicateKeyError() {
    onDuplicate = ON_DUPLICATE_KEY_ERROR;
    return this;
}

/**
 * Select the ON_DUPLICATE_KEY_IGNORE policy.
 *
 * @throws IllegalStateException if the target table has no explicit primary key
 */
@Override
public final LoaderImpl onDuplicateKeyIgnore() {
    if (table.getPrimaryKey() == null) {
        throw new IllegalStateException("ON DUPLICATE KEY IGNORE only works on tables with explicit primary keys. Table is not updatable : " + table);
    }

    onDuplicate = ON_DUPLICATE_KEY_IGNORE;
    return this;
}

/**
 * Select the ON_DUPLICATE_KEY_UPDATE policy.
 *
 * @throws IllegalStateException if the target table has no explicit primary key
 */
@Override
public final LoaderImpl onDuplicateKeyUpdate() {
    // [consistency] braced like onDuplicateKeyIgnore() above
    if (table.getPrimaryKey() == null) {
        throw new IllegalStateException("ON DUPLICATE KEY UPDATE only works on tables with explicit primary keys. Table is not updatable : " + table);
    }

    onDuplicate = ON_DUPLICATE_KEY_UPDATE;
    return this;
}

/** Select the ON_ERROR_IGNORE policy: continue loading after errors. */
@Override
public final LoaderImpl onErrorIgnore() {
    onError = ON_ERROR_IGNORE;
    return this;
}

/** Select the ON_ERROR_ABORT policy: stop loading on the first error (default). */
@Override
public final LoaderImpl onErrorAbort() {
    onError = ON_ERROR_ABORT;
    return this;
}

/** Select the COMMIT_AFTER policy, keeping the currently configured interval (default 1). */
@Override
public final LoaderImpl commitEach() {
    commit = COMMIT_AFTER;
    return this;
}

/**
 * Select the COMMIT_AFTER policy with an explicit interval.
 *
 * @param number commit after this many executions
 */
@Override
public final LoaderImpl commitAfter(int number) {
    commit = COMMIT_AFTER;
    commitAfter = number;
    return this;
}

/** Select the COMMIT_ALL policy: a single commit at the end of the load. */
@Override
public final LoaderImpl commitAll() {
    commit = COMMIT_ALL;
    return this;
}

/** Select the COMMIT_NONE policy: leave transaction control to the caller (default). */
@Override
public final LoaderImpl commitNone() {
    commit = COMMIT_NONE;
    return this;
}
/** Select the BATCH_ALL policy: batch all statements together. */
@Override
public final LoaderImpl batchAll() {
batch = BATCH_ALL;
return this;
}
/** Select the BATCH_NONE policy: no JDBC batching (default). */
@Override
public final LoaderImpl batchNone() {
batch = BATCH_NONE;
return this;
}
/** Select the BATCH_AFTER policy: flush a batch after {@code number} statements. */
@Override
public final LoaderImpl batchAfter(int number) {
batch = BATCH_AFTER;
batchAfter = number;
return this;
}
/** Select the BULK_ALL policy: a single bulk INSERT for all rows. */
@Override
public final LoaderImpl bulkAll() {
bulk = BULK_ALL;
return this;
}
/** Select the BULK_NONE policy: one INSERT per row (default). */
@Override
public final LoaderImpl bulkNone() {
bulk = BULK_NONE;
return this;
}
/** Select the BULK_AFTER policy: bulk INSERT every {@code number} rows. */
@Override
public final LoaderImpl bulkAfter(int number) {
bulk = BULK_AFTER;
bulkAfter = number;
return this;
}
@Override
public final LoaderRowsStep loadArrays(Object[]... a) {
return loadArrays(Arrays.asList(a));
}
@Override
public final LoaderRowsStep loadArrays(Iterable extends Object[]> a) {
return loadArrays(a.iterator());
}
@Override
public final LoaderRowsStep loadArrays(Iterator extends Object[]> a) {
content = CONTENT_ARRAYS;
this.arrays = a;
return this;
}
@Override
public final LoaderRowsStep loadRecords(Record... records) {
return loadRecords(Arrays.asList(records));
}
@Override
public final LoaderRowsStep loadRecords(Iterable extends Record> records) {
return loadRecords(records.iterator());
}
@Override
public final LoaderRowsStep loadRecords(Iterator extends Record> records) {
return loadArrays(Tools.iterator(records, value -> {
if (value == null)
return null;
if (source == null)
source = value.fields();
return value.intoArray();
}));
}
@Override
public final LoaderRowsStep loadArrays(Stream extends Object[]> a) {
return loadArrays(a.iterator());
}
@Override
public final LoaderRowsStep loadRecords(Stream extends Record> records) {
return loadRecords(records.iterator());
}
/** Load CSV content from a file; delegates to {@link #loadCSV(Source)}. */
@Override
public final LoaderImpl loadCSV(File file) {
return loadCSV(Source.of(file));
}
/** Load CSV content from a file with an explicit charset name. */
@Override
public final LoaderImpl loadCSV(File file, String charsetName) {
return loadCSV(Source.of(file, charsetName));
}
/** Load CSV content from a file with an explicit charset. */
@Override
public final LoaderImpl loadCSV(File file, Charset cs) {
return loadCSV(Source.of(file, cs));
}
/** Load CSV content from a file with an explicit charset decoder. */
@Override
public final LoaderImpl loadCSV(File file, CharsetDecoder dec) {
return loadCSV(Source.of(file, dec));
}
/** Load CSV content directly from a string. */
@Override
public final LoaderImpl loadCSV(String csv) {
return loadCSV(Source.of(csv));
}
/** Load CSV content from an input stream. */
@Override
public final LoaderImpl loadCSV(InputStream stream) {
return loadCSV(Source.of(stream));
}
/** Load CSV content from an input stream with an explicit charset name. */
@Override
public final LoaderImpl loadCSV(InputStream stream, String charsetName) {
return loadCSV(Source.of(stream, charsetName));
}
/** Load CSV content from an input stream with an explicit charset. */
@Override
public final LoaderImpl loadCSV(InputStream stream, Charset cs) {
return loadCSV(Source.of(stream, cs));
}
/** Load CSV content from an input stream with an explicit charset decoder. */
@Override
public final LoaderImpl loadCSV(InputStream stream, CharsetDecoder dec) {
return loadCSV(Source.of(stream, dec));
}
/** Load CSV content from a character reader. */
@Override
public final LoaderImpl loadCSV(Reader reader) {
return loadCSV(Source.of(reader));
}
/** Terminal CSV overload: records the input source and selects CSV content mode. */
@Override
public final LoaderImpl loadCSV(Source s) {
content = CONTENT_CSV;
input = s;
return this;
}
/** Load XML content from a file; delegates to {@link #loadXML(Source)}. */
@Override
public final LoaderImpl loadXML(File file) {
return loadXML(Source.of(file));
}
/** Load XML content from a file with an explicit charset name. */
@Override
public final LoaderImpl loadXML(File file, String charsetName) {
return loadXML(Source.of(file, charsetName));
}
/** Load XML content from a file with an explicit charset. */
@Override
public final LoaderImpl loadXML(File file, Charset cs) {
return loadXML(Source.of(file, cs));
}
/** Load XML content from a file with an explicit charset decoder. */
@Override
public final LoaderImpl loadXML(File file, CharsetDecoder dec) {
return loadXML(Source.of(file, dec));
}
/** Load XML content directly from a string. */
@Override
public final LoaderImpl loadXML(String xml) {
return loadXML(Source.of(xml));
}
/** Load XML content from an input stream. */
@Override
public final LoaderImpl loadXML(InputStream stream) {
return loadXML(Source.of(stream));
}
/** Load XML content from an input stream with an explicit charset name. */
@Override
public final LoaderImpl loadXML(InputStream stream, String charsetName) {
return loadXML(Source.of(stream, charsetName));
}
/** Load XML content from an input stream with an explicit charset. */
@Override
public final LoaderImpl loadXML(InputStream stream, Charset cs) {
return loadXML(Source.of(stream, cs));
}
/** Load XML content from an input stream with an explicit charset decoder. */
@Override
public final LoaderImpl loadXML(InputStream stream, CharsetDecoder dec) {
return loadXML(Source.of(stream, dec));
}
/** Load XML content from a character reader. */
@Override
public final LoaderImpl loadXML(Reader reader) {
return loadXML(Source.of(reader));
}
/** XML loading from a SAX InputSource — not yet implemented. */
// NOTE(review): content mode is mutated before the throw, leaving the loader
// in XML mode even though the call fails — confirm whether this is intended.
@Override
public final LoaderImpl loadXML(InputSource s) {
content = CONTENT_XML;
throw new UnsupportedOperationException("This is not yet implemented");
}
/** XML loading from a Source — not yet implemented (state is set, then the call fails). */
@Override
public final LoaderImpl loadXML(Source s) {
content = CONTENT_XML;
input = s;
throw new UnsupportedOperationException("This is not yet implemented");
}
/** Load JSON content from a file; delegates to {@link #loadJSON(Source)}. */
@Override
public final LoaderImpl loadJSON(File file) {
return loadJSON(Source.of(file));
}
/** Load JSON content from a file with an explicit charset name. */
@Override
public final LoaderImpl loadJSON(File file, String charsetName) {
return loadJSON(Source.of(file, charsetName));
}
/** Load JSON content from a file with an explicit charset. */
@Override
public final LoaderImpl loadJSON(File file, Charset cs) {
return loadJSON(Source.of(file, cs));
}
/** Load JSON content from a file with an explicit charset decoder. */
@Override
public final LoaderImpl loadJSON(File file, CharsetDecoder dec) {
return loadJSON(Source.of(file, dec));
}
/** Load JSON content directly from a string. */
@Override
public final LoaderImpl loadJSON(String json) {
return loadJSON(Source.of(json));
}
/** Load JSON content from an input stream. */
@Override
public final LoaderImpl loadJSON(InputStream stream) {
return loadJSON(Source.of(stream));
}
/** Load JSON content from an input stream with an explicit charset name. */
@Override
public final LoaderImpl loadJSON(InputStream stream, String charsetName) {
return loadJSON(Source.of(stream, charsetName));
}
/** Load JSON content from an input stream with an explicit charset. */
@Override
public final LoaderImpl loadJSON(InputStream stream, Charset cs) {
return loadJSON(Source.of(stream, cs));
}
/** Load JSON content from an input stream with an explicit charset decoder. */
@Override
public final LoaderImpl loadJSON(InputStream stream, CharsetDecoder dec) {
return loadJSON(Source.of(stream, dec));
}
/** Load JSON content from a character reader. */
@Override
public final LoaderImpl loadJSON(Reader reader) {
return loadJSON(Source.of(reader));
}
/** Terminal JSON overload: records the input source and selects JSON content mode. */
@Override
public final LoaderImpl loadJSON(Source s) {
content = CONTENT_JSON;
input = s;
return this;
}
// -------------------------------------------------------------------------
// CSV configuration
// -------------------------------------------------------------------------
@Override
public final LoaderImpl fields(Field>... f) {
this.fields = f;
this.primaryKey = new BitSet(f.length);
if (table.getPrimaryKey() != null)
for (int i = 0; i < fields.length; i++)
if (fields[i] != null && table.getPrimaryKey().getFields().contains(fields[i]))
primaryKey.set(i);
return this;
}
@Override
public final LoaderImpl fields(Collection extends Field>> f) {
return fields(f.toArray(EMPTY_FIELD));
}
@Override
public final LoaderImpl fields(LoaderFieldMapper mapper) {
fieldMapper = mapper;
return this;
}
@Override
@Deprecated
public LoaderImpl fieldsFromSource() {
return fieldsCorresponding();
}
@Override
public LoaderImpl fieldsCorresponding() {
fieldsCorresponding = true;
return this;
}
private final void fields0(Object[] row) {
Field>[] f = new Field[row.length];
// [#5145] When loading arrays, or when CSV headers are ignored,
// the source is still null at this stage.
if (source == null)
if (fieldsCorresponding)
throw new LoaderConfigurationException("Using fieldsCorresponding() requires field names to be available in source.");
else
source = Tools.fields(row.length);
if (fieldMapper != null)
for (int i = 0; i < row.length; i++) {
final int index = i;
f[i] = fieldMapper.map(new LoaderFieldContext() {
@Override
public int index() {
return index;
}
@Override
public Field> field() {
return source[index];
}
});
}
else if (fieldsCorresponding)
for (int i = 0; i < row.length; i++) {
f[i] = table.field(source[i]);
if (f[i] == null)
log.info("No column in target table " + table + " found for input field " + source[i]);
}
fields(f);
}
/** Number of leading input rows to skip (e.g. 1 to skip a CSV header row). */
@Override
public final LoaderImpl ignoreRows(int number) {
ignoreRows = number;
return this;
}
/** Set the CSV quote character. */
@Override
public final LoaderImpl quote(char q) {
this.quote = q;
return this;
}
/** Set the CSV field separator character. */
@Override
public final LoaderImpl separator(char s) {
this.separator = s;
return this;
}
/** Set the string that represents SQL NULL in the input. */
@Override
public final LoaderImpl nullString(String n) {
this.nullString = n;
return this;
}
// -------------------------------------------------------------------------
// XML configuration
// -------------------------------------------------------------------------
// [...] to be specified
// -------------------------------------------------------------------------
// Listening
// -------------------------------------------------------------------------
/** Alias for {@link #onRowEnd(LoaderRowListener)}. */
@Override
public final LoaderImpl onRow(LoaderRowListener l) {
return onRowEnd(l);
}
/** Register a listener invoked before each row is processed. */
@Override
public final LoaderImpl onRowStart(LoaderRowListener l) {
onRowStart = l;
return this;
}
/** Register a listener invoked after each row is processed. */
@Override
public final LoaderImpl onRowEnd(LoaderRowListener l) {
onRowEnd = l;
return this;
}
// -------------------------------------------------------------------------
// Execution
// -------------------------------------------------------------------------

/**
 * Run the configured load, dispatching on the previously selected content
 * type. XML loading is not implemented and fails with
 * {@link UnsupportedOperationException}.
 *
 * @return this loader, for result inspection
 * @throws IOException if reading the input fails
 * @throws IllegalStateException if no known content type was configured
 */
@Override
public final LoaderImpl execute() throws IOException {
    checkFlags();

    switch (content) {
        case CONTENT_CSV:
            executeCSV();
            break;

        case CONTENT_XML:
            throw new UnsupportedOperationException();

        case CONTENT_JSON:
            executeJSON();
            break;

        case CONTENT_ARRAYS:
            executeRows();
            break;

        default:
            throw new IllegalStateException();
    }

    return this;
}
// Validate mutually exclusive configuration flags before executing the load.
private final void checkFlags() {
// Bulk INSERTs cannot express per-row ON DUPLICATE KEY semantics
if (bulk != BULK_NONE && onDuplicate != ON_DUPLICATE_KEY_ERROR)
throw new LoaderConfigurationException("Cannot apply bulk loading with onDuplicateKey flags. Turn off either flag.");
}
private final void executeJSON() {
Reader reader = null;
try {
reader = input.reader();
Result r = new JSONReader<>(configuration.dsl(), null, null, false).read(reader);
source = r.fields();
// The current json format is not designed for streaming. Thats why
// all records are loaded at once.
List
// © 2015 - 2025 Weber Informatics LLC | Privacy Policy (site footer from extraction; file is truncated above mid-statement)