/*
 * Copyright (c) 2020, Haiyang Li.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.landawn.abacus.jdbc;

import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.Reader;
import java.io.Writer;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.BiConsumer;
import java.util.function.Function;
import java.util.function.Supplier;

import com.landawn.abacus.annotation.Beta;
import com.landawn.abacus.annotation.SequentialOnly;
import com.landawn.abacus.annotation.Stateful;
import com.landawn.abacus.jdbc.Jdbc.ColumnGetter;
import com.landawn.abacus.parser.JSONSerializationConfig;
import com.landawn.abacus.parser.JSONSerializationConfig.JSC;
import com.landawn.abacus.type.Type;
import com.landawn.abacus.util.BufferedJSONWriter;
import com.landawn.abacus.util.CSVUtil;
import com.landawn.abacus.util.DataSet;
import com.landawn.abacus.util.DateTimeFormat;
import com.landawn.abacus.util.Fn;
import com.landawn.abacus.util.IOUtil;
import com.landawn.abacus.util.N;
import com.landawn.abacus.util.Objectory;
import com.landawn.abacus.util.ParsedSql;
import com.landawn.abacus.util.Strings;
import com.landawn.abacus.util.Throwables;
import com.landawn.abacus.util.WD;

/**
 * @see com.landawn.abacus.util.CSVUtil
 * @see com.landawn.abacus.condition.ConditionFactory
 * @see com.landawn.abacus.condition.ConditionFactory.CF
 * @see com.landawn.abacus.annotation.ReadOnly
 * @see com.landawn.abacus.annotation.ReadOnlyId
 * @see com.landawn.abacus.annotation.NonUpdatable
 * @see com.landawn.abacus.annotation.Transient
 * @see com.landawn.abacus.annotation.Table
 * @see com.landawn.abacus.annotation.Column
 *
 * @see Connection
 * @see java.sql.Statement
 * @see PreparedStatement
 * @see ResultSet
 */
public final class JdbcUtils {

    static final char[] ELEMENT_SEPARATOR_CHAR_ARRAY = Strings.ELEMENT_SEPARATOR.toCharArray();

    static final char[] NULL_CHAR_ARRAY = Strings.NULL_STRING.toCharArray();

    static final int DEFAULT_QUEUE_SIZE_FOR_ROW_PARSER = 1024;

    private JdbcUtils() {
        // utility class; no instances.
    }

    /**
     * Imports the data from {@code DataSet} to the database.
     * <p>
     * The column order in the SQL must be consistent with the column order in the DataSet.
     * <p>
     * The specified {@code insertSQL} can be generated by the code below:
     *
     * <pre>{@code
     *   List<String> columnNameList = new ArrayList<>(dataset.columnNameList());
     *   columnNameList.retainAll(yourSelectColumnNames);
     *   String sql = PSC.insert(columnNameList).into(tableName).sql();
     * }</pre>
     *
     * @param dataset the DataSet containing the data to be imported
     * @param sourceDataSource the DataSource to get the database connection
     * @param insertSQL the SQL insert statement; the column order in the SQL must be consistent with the column order in the DataSet
     * @return the number of rows affected
     * @throws SQLException if a database access error occurs
     */
    public static int importData(final DataSet dataset, final javax.sql.DataSource sourceDataSource, final String insertSQL) throws SQLException {
        final Connection conn = sourceDataSource.getConnection();

        try {
            return importData(dataset, conn, insertSQL);
        } finally {
            JdbcUtil.releaseConnection(conn, sourceDataSource);
        }
    }
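    /*
     * A minimal usage sketch (illustrative only: the table, column, and variable names are
     * hypothetical, and PSC refers to the SQL builder shown in the Javadoc above):
     *
     *   DataSet dataset = JdbcUtil.executeQuery(srcDataSource, "SELECT id, first_name, last_name FROM account");
     *
     *   List<String> columnNameList = new ArrayList<>(dataset.columnNameList());
     *   String insertSQL = PSC.insert(columnNameList).into("account_archive").sql();
     *
     *   int inserted = JdbcUtils.importData(dataset, targetDataSource, insertSQL);
     */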

    /**
     * Imports the data from {@code DataSet} to the database.
     * <p>
     * The column order in the SQL must be consistent with the column order in the DataSet.
     * <p>
     * The specified {@code insertSQL} can be generated by the code below:
     *
     * <pre>{@code
     *   List<String> columnNameList = new ArrayList<>(dataset.columnNameList());
     *   columnNameList.retainAll(yourSelectColumnNames);
     *   String sql = PSC.insert(columnNameList).into(tableName).sql();
     * }</pre>
     *
     * @param dataset the DataSet containing the data to be imported
     * @param conn the Connection to the database
     * @param insertSQL the SQL insert statement; the column order in the SQL must be consistent with the column order in the DataSet
     * @return the number of rows affected
     * @throws SQLException if a database access error occurs
     */
    public static int importData(final DataSet dataset, final Connection conn, final String insertSQL) throws SQLException {
        return importData(dataset, dataset.columnNameList(), conn, insertSQL);
    }

    /**
     * Imports the data from {@code DataSet} to the database.
     * <p>
     * The column order in the SQL must be consistent with the column order in the DataSet.
     * <p>
     * The specified {@code insertSQL} can be generated by the code below:
     *
     * <pre>{@code
     *   List<String> columnNameList = new ArrayList<>(dataset.columnNameList());
     *   columnNameList.retainAll(selectColumnNames);
     *   String sql = PSC.insert(columnNameList).into(tableName).sql();
     * }</pre>
     *
     * @param dataset the DataSet containing the data to be imported
     * @param selectColumnNames the collection of column names to be selected
     * @param conn the Connection to the database
     * @param insertSQL the SQL insert statement; the column order in the SQL must be consistent with the column order in the DataSet
     * @return the number of rows affected
     * @throws SQLException if a database access error occurs
     */
    public static int importData(final DataSet dataset, final Collection<String> selectColumnNames, final Connection conn, final String insertSQL)
            throws SQLException {
        return importData(dataset, selectColumnNames, conn, insertSQL, JdbcUtil.DEFAULT_BATCH_SIZE, 0);
    }

    /**
     * Imports the data from {@code DataSet} to the database.
     * <p>
     * The column order in the SQL must be consistent with the column order in the DataSet.
     * <p>
     * The specified {@code insertSQL} can be generated by the code below:
     *
     * <pre>{@code
     *   List<String> columnNameList = new ArrayList<>(dataset.columnNameList());
     *   columnNameList.retainAll(selectColumnNames);
     *   String sql = PSC.insert(columnNameList).into(tableName).sql();
     * }</pre>
     *
     * @param dataset the DataSet containing the data to be imported
     * @param selectColumnNames the collection of column names to be selected
     * @param conn the Connection to the database
     * @param insertSQL the SQL insert statement; the column order in the SQL must be consistent with the column order in the DataSet
     * @param batchSize the number of rows to be inserted in each batch
     * @param batchIntervalInMillis the interval in milliseconds between each batch
     * @return the number of rows affected
     * @throws SQLException if a database access error occurs
     */
    public static int importData(final DataSet dataset, final Collection<String> selectColumnNames, final Connection conn, final String insertSQL,
            final int batchSize, final long batchIntervalInMillis) throws SQLException {
        return importData(dataset, selectColumnNames, Fn.alwaysTrue(), conn, insertSQL, batchSize, batchIntervalInMillis);
    }

    /**
     * Imports the data from {@code DataSet} to the database.
     * <p>
     * The column order in the SQL must be consistent with the column order in the DataSet.
     * <p>
     * The specified {@code insertSQL} can be generated by the code below:
     *
     * <pre>{@code
     *   List<String> columnNameList = new ArrayList<>(dataset.columnNameList());
     *   columnNameList.retainAll(selectColumnNames);
     *   String sql = PSC.insert(columnNameList).into(tableName).sql();
     * }</pre>
     *
     * @param <E> the type of exception that might be thrown
     * @param dataset the DataSet containing the data to be imported
     * @param selectColumnNames the collection of column names to be selected
     * @param filter a predicate to filter the data
     * @param conn the Connection to the database
     * @param insertSQL the SQL insert statement; the column order in the SQL must be consistent with the column order in the DataSet
     * @param batchSize the number of rows to be inserted in each batch
     * @param batchIntervalInMillis the interval in milliseconds between each batch
     * @return the number of rows affected
     * @throws SQLException if a database access error occurs
     * @throws E if the filter throws an exception
     */
    public static <E extends Exception> int importData(final DataSet dataset, final Collection<String> selectColumnNames,
            final Throwables.Predicate<? super Object[], E> filter, final Connection conn, final String insertSQL, final int batchSize,
            final long batchIntervalInMillis) throws SQLException, E {
        try (PreparedStatement stmt = JdbcUtil.prepareStatement(conn, insertSQL)) {
            return importData(dataset, selectColumnNames, filter, stmt, batchSize, batchIntervalInMillis);
        }
    }
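    /*
     * A filtered-import sketch (illustrative; assumes the DataSet's third column, index 2,
     * holds a status flag; the filter sees each row as an Object[] in DataSet column order):
     *
     *   Throwables.Predicate<Object[], RuntimeException> activeOnly = row -> "ACTIVE".equals(row[2]);
     *
     *   int inserted = JdbcUtils.importData(dataset, dataset.columnNameList(), activeOnly, conn, insertSQL,
     *           JdbcUtil.DEFAULT_BATCH_SIZE, 0);
     */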

    /**
     * Imports the data from {@code DataSet} to the database.
     * <p>
     * The column order in the SQL must be consistent with the column order in the DataSet.
     * <p>
     * The specified {@code insertSQL} can be generated by the code below:
     *
     * <pre>{@code
     *   List<String> columnNameList = new ArrayList<>(dataset.columnNameList());
     *   columnNameList.retainAll(yourSelectColumnNames);
     *   String sql = PSC.insert(columnNameList).into(tableName).sql();
     * }</pre>
     *
     * @param dataset the DataSet containing the data to be imported
     * @param conn the Connection to the database
     * @param insertSQL the SQL insert statement; the column order in the SQL must be consistent with the column order in the DataSet
     * @param columnTypeMap a map specifying the types of the columns
     * @return the number of rows affected
     * @throws SQLException if a database access error occurs
     */
    @SuppressWarnings("rawtypes")
    public static int importData(final DataSet dataset, final Connection conn, final String insertSQL, final Map<String, ? extends Type> columnTypeMap)
            throws SQLException {
        return importData(dataset, conn, insertSQL, JdbcUtil.DEFAULT_BATCH_SIZE, 0, columnTypeMap);
    }

    /**
     * Imports the data from {@code DataSet} to the database.
     * <p>
     * The column order in the SQL must be consistent with the column order in the DataSet.
     * <p>
     * The specified {@code insertSQL} can be generated by the code below:
     *
     * <pre>{@code
     *   List<String> columnNameList = new ArrayList<>(dataset.columnNameList());
     *   columnNameList.retainAll(yourSelectColumnNames);
     *   String sql = PSC.insert(columnNameList).into(tableName).sql();
     * }</pre>
     *
     * @param dataset the DataSet containing the data to be imported
     * @param conn the Connection to the database
     * @param insertSQL the SQL insert statement; the column order in the SQL must be consistent with the column order in the DataSet
     * @param batchSize the number of rows to be inserted in each batch
     * @param batchIntervalInMillis the interval in milliseconds between each batch
     * @param columnTypeMap a map specifying the types of the columns
     * @return the number of rows affected
     * @throws SQLException if a database access error occurs
     */
    @SuppressWarnings("rawtypes")
    public static int importData(final DataSet dataset, final Connection conn, final String insertSQL, final int batchSize,
            final long batchIntervalInMillis, final Map<String, ? extends Type> columnTypeMap) throws SQLException {
        return importData(dataset, Fn.alwaysTrue(), conn, insertSQL, batchSize, batchIntervalInMillis, columnTypeMap);
    }
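    /*
     * A type-mapped import sketch (illustrative; N.typeOf(Class) resolves the abacus Type
     * for a column, and only columns present in the map are written):
     *
     *   Map<String, Type<?>> columnTypeMap = new HashMap<>();
     *   columnTypeMap.put("id", N.typeOf(long.class));
     *   columnTypeMap.put("first_name", N.typeOf(String.class));
     *   columnTypeMap.put("birth_date", N.typeOf(java.sql.Date.class));
     *
     *   int inserted = JdbcUtils.importData(dataset, conn, insertSQL, columnTypeMap);
     */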

    /**
     * Imports the data from {@code DataSet} to the database.
     * <p>
     * The column order in the SQL must be consistent with the column order in the DataSet.
     * <p>
     * The specified {@code insertSQL} can be generated by the code below:
     *
     * <pre>{@code
     *   List<String> columnNameList = new ArrayList<>(dataset.columnNameList());
     *   columnNameList.retainAll(yourSelectColumnNames);
     *   String sql = PSC.insert(columnNameList).into(tableName).sql();
     * }</pre>
     *
     * @param <E> the type of exception that might be thrown
     * @param dataset the DataSet containing the data to be imported
     * @param filter a predicate to filter the data
     * @param conn the Connection to the database
     * @param insertSQL the SQL insert statement; the column order in the SQL must be consistent with the column order in the DataSet
     * @param batchSize the number of rows to be inserted in each batch
     * @param batchIntervalInMillis the interval in milliseconds between each batch
     * @param columnTypeMap a map specifying the types of the columns
     * @return the number of rows affected
     * @throws SQLException if a database access error occurs
     * @throws E if the filter throws an exception
     */
    @SuppressWarnings("rawtypes")
    public static <E extends Exception> int importData(final DataSet dataset, final Throwables.Predicate<? super Object[], E> filter, final Connection conn,
            final String insertSQL, final int batchSize, final long batchIntervalInMillis, final Map<String, ? extends Type> columnTypeMap)
            throws SQLException, E {
        try (PreparedStatement stmt = JdbcUtil.prepareStatement(conn, insertSQL)) {
            return importData(dataset, filter, stmt, batchSize, batchIntervalInMillis, columnTypeMap);
        }
    }

    /**
     * Imports the data from {@code DataSet} to the database.
     * <p>
     * The column order in the SQL must be consistent with the column order in the DataSet.
     * <p>
     * The specified {@code insertSQL} can be generated by the code below:
     *
     * <pre>{@code
     *   List<String> columnNameList = new ArrayList<>(dataset.columnNameList());
     *   columnNameList.retainAll(yourSelectColumnNames);
     *   String sql = PSC.insert(columnNameList).into(tableName).sql();
     * }</pre>
     *
     * @param dataset the DataSet containing the data to be imported
     * @param conn the Connection to the database
     * @param insertSQL the SQL insert statement; the column order in the SQL must be consistent with the column order in the DataSet
     * @param stmtSetter a BiConsumer to set the parameters of the PreparedStatement
     * @return the number of rows affected
     * @throws SQLException if a database access error occurs
     */
    public static int importData(final DataSet dataset, final Connection conn, final String insertSQL,
            final Throwables.BiConsumer<? super PreparedQuery, ? super Object[], SQLException> stmtSetter) throws SQLException {
        return importData(dataset, conn, insertSQL, JdbcUtil.DEFAULT_BATCH_SIZE, 0, stmtSetter);
    }

    /**
     * Imports the data from {@code DataSet} to the database.
     * <p>
     * The column order in the SQL must be consistent with the column order in the DataSet.
     * <p>
     * The specified {@code insertSQL} can be generated by the code below:
     *
     * <pre>{@code
     *   List<String> columnNameList = new ArrayList<>(dataset.columnNameList());
     *   columnNameList.retainAll(yourSelectColumnNames);
     *   String sql = PSC.insert(columnNameList).into(tableName).sql();
     * }</pre>
     *
     * @param dataset the DataSet containing the data to be imported
     * @param conn the Connection to the database
     * @param insertSQL the SQL insert statement; the column order in the SQL must be consistent with the column order in the DataSet
     * @param batchSize the number of rows to be inserted in each batch
     * @param batchIntervalInMillis the interval in milliseconds between each batch
     * @param stmtSetter a BiConsumer to set the parameters of the PreparedStatement
     * @return the number of rows affected
     * @throws SQLException if a database access error occurs
     */
    public static int importData(final DataSet dataset, final Connection conn, final String insertSQL, final int batchSize,
            final long batchIntervalInMillis, final Throwables.BiConsumer<? super PreparedQuery, ? super Object[], SQLException> stmtSetter)
            throws SQLException {
        return importData(dataset, Fn.alwaysTrue(), conn, insertSQL, batchSize, batchIntervalInMillis, stmtSetter);
    }

    /**
     * Imports the data from {@code DataSet} to the database.
     * <p>
     * The column order in the SQL must be consistent with the column order in the DataSet.
     * <p>
     * The specified {@code insertSQL} can be generated by the code below:
     *
     * <pre>{@code
     *   List<String> columnNameList = new ArrayList<>(dataset.columnNameList());
     *   columnNameList.retainAll(yourSelectColumnNames);
     *   String sql = PSC.insert(columnNameList).into(tableName).sql();
     * }</pre>
     *
     * @param <E> the type of exception that might be thrown
     * @param dataset the DataSet containing the data to be imported
     * @param filter a predicate to filter the data
     * @param conn the Connection to the database
     * @param insertSQL the SQL insert statement; the column order in the SQL must be consistent with the column order in the DataSet
     * @param batchSize the number of rows to be inserted in each batch
     * @param batchIntervalInMillis the interval in milliseconds between each batch
     * @param stmtSetter a BiConsumer to set the parameters of the PreparedStatement
     * @return the number of rows affected
     * @throws SQLException if a database access error occurs
     * @throws E if the filter throws an exception
     */
    public static <E extends Exception> int importData(final DataSet dataset, final Throwables.Predicate<? super Object[], E> filter, final Connection conn,
            final String insertSQL, final int batchSize, final long batchIntervalInMillis,
            final Throwables.BiConsumer<? super PreparedQuery, ? super Object[], SQLException> stmtSetter) throws SQLException, E {
        try (PreparedStatement stmt = JdbcUtil.prepareStatement(conn, insertSQL)) {
            return importData(dataset, filter, stmt, batchSize, batchIntervalInMillis, stmtSetter);
        }
    }
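    /*
     * A custom-setter sketch (illustrative; the setter receives each row as an Object[] in
     * DataSet column order and maps it onto the statement parameters):
     *
     *   Throwables.BiConsumer<PreparedQuery, Object[], SQLException> stmtSetter = (q, row) -> {
     *       q.setLong(1, (Long) row[0]);
     *       q.setString(2, (String) row[1]);
     *   };
     *
     *   int inserted = JdbcUtils.importData(dataset, conn, insertSQL, JdbcUtil.DEFAULT_BATCH_SIZE, 0, stmtSetter);
     */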

    /**
     * Imports the data from {@code DataSet} to the database using the provided {@code PreparedStatement}.
     * <p>
     * The column order in the SQL must be consistent with the column order in the DataSet.
     *
     * @param dataset the DataSet containing the data to be imported
     * @param stmt the PreparedStatement to be used for the import
     * @return the number of rows affected
     * @throws SQLException if a database access error occurs
     */
    public static int importData(final DataSet dataset, final PreparedStatement stmt) throws SQLException {
        return importData(dataset, dataset.columnNameList(), stmt);
    }

    /**
     * Imports the data from {@code DataSet} to the database.
     * <p>
     * The column order in the SQL must be consistent with the column order in the DataSet.
     * <p>
     * The specified {@code insertSQL} can be generated by the code below:
     *
     * <pre>{@code
     *   List<String> columnNameList = new ArrayList<>(dataset.columnNameList());
     *   columnNameList.retainAll(selectColumnNames);
     *   String sql = PSC.insert(columnNameList).into(tableName).sql();
     * }</pre>
     *
     * @param dataset the DataSet containing the data to be imported
     * @param selectColumnNames the collection of column names to be selected
     * @param stmt the PreparedStatement to be used for the import
     * @return the number of rows affected
     * @throws SQLException if a database access error occurs
     */
    public static int importData(final DataSet dataset, final Collection<String> selectColumnNames, final PreparedStatement stmt) throws SQLException {
        return importData(dataset, selectColumnNames, stmt, JdbcUtil.DEFAULT_BATCH_SIZE, 0);
    }

    /**
     * Imports the data from {@code DataSet} to the database.
     * <p>
     * The column order in the SQL must be consistent with the column order in the DataSet.
     * <p>
     * The specified {@code insertSQL} can be generated by the code below:
     *
     * <pre>{@code
     *   List<String> columnNameList = new ArrayList<>(dataset.columnNameList());
     *   columnNameList.retainAll(selectColumnNames);
     *   String sql = PSC.insert(columnNameList).into(tableName).sql();
     * }</pre>
     *
     * @param dataset the DataSet containing the data to be imported
     * @param selectColumnNames the collection of column names to be selected
     * @param stmt the PreparedStatement to be used for the import
     * @param batchSize the number of rows to be inserted in each batch
     * @param batchIntervalInMillis the interval in milliseconds between each batch
     * @return the number of rows affected
     * @throws SQLException if a database access error occurs
     */
    public static int importData(final DataSet dataset, final Collection<String> selectColumnNames, final PreparedStatement stmt, final int batchSize,
            final long batchIntervalInMillis) throws SQLException {
        return importData(dataset, selectColumnNames, Fn.alwaysTrue(), stmt, batchSize, batchIntervalInMillis);
    }

    /**
     * Imports the data from {@code DataSet} to the database.
     * <p>
     * The column order in the SQL must be consistent with the column order in the DataSet.
     * <p>
     * The specified {@code insertSQL} can be generated by the code below:
     *
     * <pre>{@code
     *   List<String> columnNameList = new ArrayList<>(dataset.columnNameList());
     *   columnNameList.retainAll(selectColumnNames);
     *   String sql = PSC.insert(columnNameList).into(tableName).sql();
     * }</pre>
     *
     * @param <E> the type of exception that might be thrown
     * @param dataset the DataSet containing the data to be imported
     * @param selectColumnNames the collection of column names to be selected
     * @param filter a predicate to filter the data
     * @param stmt the PreparedStatement to be used for the import
     * @param batchSize the number of rows to be inserted in each batch
     * @param batchIntervalInMillis the interval in milliseconds between each batch
     * @return the number of rows affected
     * @throws SQLException if a database access error occurs
     * @throws E if the filter throws an exception
     */
    public static <E extends Exception> int importData(final DataSet dataset, final Collection<String> selectColumnNames,
            final Throwables.Predicate<? super Object[], E> filter, final PreparedStatement stmt, final int batchSize, final long batchIntervalInMillis)
            throws SQLException, E {
        final Type<?> objType = N.typeOf(Object.class);
        final Map<String, Type<?>> columnTypeMap = new HashMap<>();

        for (final String propName : selectColumnNames) {
            columnTypeMap.put(propName, objType);
        }

        return importData(dataset, filter, stmt, batchSize, batchIntervalInMillis, columnTypeMap);
    }

    /**
     * Imports the data from {@code DataSet} to the database using the provided {@code PreparedStatement}.
     * <p>
     * The column order in the SQL must be consistent with the column order in the DataSet.
     *
     * @param dataset the DataSet containing the data to be imported
     * @param stmt the PreparedStatement to be used for the import
     * @param columnTypeMap a map specifying the types of the columns
     * @return the number of rows affected
     * @throws SQLException if a database access error occurs
     */
    @SuppressWarnings("rawtypes")
    public static int importData(final DataSet dataset, final PreparedStatement stmt, final Map<String, ? extends Type> columnTypeMap) throws SQLException {
        return importData(dataset, stmt, JdbcUtil.DEFAULT_BATCH_SIZE, 0, columnTypeMap);
    }

    /**
     * Imports the data from {@code DataSet} to the database using the provided {@code PreparedStatement}.
     * <p>
     * The column order in the SQL must be consistent with the column order in the DataSet.
     *
     * @param dataset the DataSet containing the data to be imported
     * @param stmt the PreparedStatement to be used for the import
     * @param batchSize the number of rows to be inserted in each batch
     * @param batchIntervalInMillis the interval in milliseconds between each batch
     * @param columnTypeMap a map specifying the types of the columns
     * @return the number of rows affected
     * @throws SQLException if a database access error occurs
     */
    @SuppressWarnings("rawtypes")
    public static int importData(final DataSet dataset, final PreparedStatement stmt, final int batchSize, final long batchIntervalInMillis,
            final Map<String, ? extends Type> columnTypeMap) throws SQLException {
        return importData(dataset, Fn.alwaysTrue(), stmt, batchSize, batchIntervalInMillis, columnTypeMap);
    }

    /**
     * Imports the data from {@code DataSet} to the database.
     * <p>
     * The column order in the SQL must be consistent with the column order in the DataSet.
     * <p>
     * The specified {@code insertSQL} can be generated by the code below:
     *
     * <pre>{@code
     *   List<String> columnNameList = new ArrayList<>(dataset.columnNameList());
     *   columnNameList.retainAll(yourSelectColumnNames);
     *   String sql = PSC.insert(columnNameList).into(tableName).sql();
     * }</pre>
     *
     * @param <E> the type of exception that might be thrown
     * @param dataset the DataSet containing the data to be imported
     * @param filter a predicate to filter the data
     * @param stmt the PreparedStatement to be used for the import
     * @param batchSize the number of rows to be inserted in each batch
     * @param batchIntervalInMillis the interval in milliseconds between each batch
     * @param columnTypeMap a map specifying the types of the columns
     * @return the number of rows affected
     * @throws IllegalArgumentException if {@code batchSize} is not greater than 0 or {@code batchIntervalInMillis} is negative
     * @throws SQLException if a database access error occurs
     * @throws E if the filter throws an exception
     */
    @SuppressWarnings({ "rawtypes", "null" })
    public static <E extends Exception> int importData(final DataSet dataset, final Throwables.Predicate<? super Object[], E> filter,
            final PreparedStatement stmt, final int batchSize, final long batchIntervalInMillis, final Map<String, ? extends Type> columnTypeMap)
            throws IllegalArgumentException, SQLException, E {
        N.checkArgument(batchSize > 0 && batchIntervalInMillis >= 0, "'batchSize'=%s must be greater than 0 and 'batchIntervalInMillis'=%s can't be negative", //NOSONAR
                batchSize, batchIntervalInMillis);

        final Throwables.BiConsumer<PreparedQuery, Object[], SQLException> stmtSetter = new Throwables.BiConsumer<>() {
            private int columnCount = 0;
            private Type[] columnTypes = null;
            private int[] columnIndexes = new int[columnCount];

            @Override
            public void accept(final PreparedQuery t, final Object[] u) throws SQLException {
                if (columnTypes == null) {
                    columnCount = columnTypeMap.size();
                    columnTypes = new Type[columnCount];
                    columnIndexes = new int[columnCount];

                    final List<String> columnNameList = dataset.columnNameList();
                    final Set<String> columnNameSet = N.newHashSet(columnCount);
                    int idx = 0;

                    for (final String columnName : columnNameList) {
                        if (columnTypeMap.containsKey(columnName)) {
                            columnIndexes[idx] = dataset.getColumnIndex(columnName);
                            columnTypes[idx] = columnTypeMap.get(columnName);
                            columnNameSet.add(columnName);
                            idx++;
                        }
                    }

                    if (columnNameSet.size() != columnTypeMap.size()) {
                        final List<String> keys = new ArrayList<>(columnTypeMap.keySet());
                        keys.removeAll(columnNameSet);
                        throw new IllegalArgumentException(keys + " are not included in titles: " + N.toString(columnNameList));
                    }
                }

                for (int j = 0; j < columnCount; j++) {
                    columnTypes[j].set(stmt, j + 1, dataset.get(columnIndexes[j]));
                }
            }
        };

        return importData(dataset, filter, stmt, batchSize, batchIntervalInMillis, stmtSetter);
    }

    /**
     * Imports the data from {@code DataSet} to the database using the provided {@code PreparedStatement}.
     * <p>
     * The column order in the SQL must be consistent with the column order in the DataSet.
     *
     * @param dataset the DataSet containing the data to be imported
     * @param stmt the PreparedStatement to be used for the import
     * @param stmtSetter a BiConsumer to set the parameters of the PreparedStatement
     * @return the number of rows affected
     * @throws SQLException if a database access error occurs
     */
    public static int importData(final DataSet dataset, final PreparedStatement stmt,
            final Throwables.BiConsumer<? super PreparedQuery, ? super Object[], SQLException> stmtSetter) throws SQLException {
        return importData(dataset, stmt, JdbcUtil.DEFAULT_BATCH_SIZE, 0, stmtSetter);
    }

    /**
     * Imports the data from {@code DataSet} to the database using the provided {@code PreparedStatement}.
     * <p>
     * The column order in the SQL must be consistent with the column order in the DataSet.
     *
     * @param dataset the DataSet containing the data to be imported
     * @param stmt the PreparedStatement to be used for the import
     * @param batchSize the number of rows to be inserted in each batch
     * @param batchIntervalInMillis the interval in milliseconds between each batch
     * @param stmtSetter a BiConsumer to set the parameters of the PreparedStatement
     * @return the number of rows affected
     * @throws SQLException if a database access error occurs
     */
    public static int importData(final DataSet dataset, final PreparedStatement stmt, final int batchSize, final long batchIntervalInMillis,
            final Throwables.BiConsumer<? super PreparedQuery, ? super Object[], SQLException> stmtSetter) throws SQLException {
        return importData(dataset, Fn.alwaysTrue(), stmt, batchSize, batchIntervalInMillis, stmtSetter);
    }

    /**
     * Imports the data from {@code DataSet} to the database.
     * <p>
     * The column order in the SQL must be consistent with the column order in the DataSet.
     * <p>
     * The specified {@code insertSQL} can be generated by the code below:
     *
     * <pre>{@code
     *   List<String> columnNameList = new ArrayList<>(dataset.columnNameList());
     *   columnNameList.retainAll(yourSelectColumnNames);
     *   String sql = PSC.insert(columnNameList).into(tableName).sql();
     * }</pre>
     *
     * @param <E> the type of exception that might be thrown
     * @param dataset the DataSet containing the data to be imported
     * @param filter a predicate to filter the data
     * @param stmt the PreparedStatement to be used for the import
     * @param batchSize the number of rows to be inserted in each batch
     * @param batchIntervalInMillis the interval in milliseconds between each batch
     * @param stmtSetter a BiConsumer to set the parameters of the PreparedStatement
     * @return the number of rows affected
     * @throws IllegalArgumentException if {@code batchSize} is not greater than 0 or {@code batchIntervalInMillis} is negative
     * @throws SQLException if a database access error occurs
     * @throws E if the filter throws an exception
     */
    public static <E extends Exception> int importData(final DataSet dataset, final Throwables.Predicate<? super Object[], E> filter,
            final PreparedStatement stmt, final int batchSize, final long batchIntervalInMillis,
            final Throwables.BiConsumer<? super PreparedQuery, ? super Object[], SQLException> stmtSetter)
            throws IllegalArgumentException, SQLException, E {
        N.checkArgument(batchSize > 0 && batchIntervalInMillis >= 0, "'batchSize'=%s must be greater than 0 and 'batchIntervalInMillis'=%s can't be negative",
                batchSize, batchIntervalInMillis);

        final PreparedQuery stmtForSetter = new PreparedQuery(stmt);
        final int columnCount = dataset.columnNameList().size();
        final Object[] row = new Object[columnCount];
        int result = 0;

        for (int i = 0, size = dataset.size(); result < size && i < size; i++) {
            dataset.absolute(i);

            for (int j = 0; j < columnCount; j++) {
                row[j] = dataset.get(j);
            }

            if (filter != null && !filter.test(row)) {
                continue;
            }

            stmtSetter.accept(stmtForSetter, row);
            stmtForSetter.addBatch();

            if ((++result % batchSize) == 0) {
                JdbcUtil.executeBatch(stmt);

                if (batchIntervalInMillis > 0) {
                    N.sleep(batchIntervalInMillis);
                }
            }
        }

        if ((result % batchSize) > 0) {
            JdbcUtil.executeBatch(stmt);
        }

        return result;
    }

    /**
     * Imports the data from a file to the database.
     *
     * @param <E> the type of exception that might be thrown
     * @param file the file containing the data to be imported
     * @param sourceDataSource the DataSource to get the database connection
     * @param insertSQL the SQL insert statement
     * @param func a function to process each line of the file and convert it to an array of objects for record insert.
     *        Returns a {@code null} array to skip the line.
     * @return the number of rows affected
     * @throws SQLException if a database access error occurs
     * @throws IOException if an I/O error occurs
     * @throws E if the function throws an exception
     */
    public static <E extends Exception> long importData(final File file, final javax.sql.DataSource sourceDataSource, final String insertSQL,
            final Throwables.Function<? super String, Object[], E> func) throws SQLException, IOException, E {
        final Connection conn = sourceDataSource.getConnection();

        try {
            return importData(file, conn, insertSQL, JdbcUtil.DEFAULT_BATCH_SIZE, 0, func);
        } finally {
            JdbcUtil.releaseConnection(conn, sourceDataSource);
        }
    }
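    /*
     * A flat-file import sketch (illustrative; assumes a pipe-delimited file whose three
     * fields line up with the three statement parameters; returning null skips a line):
     *
     *   long inserted = JdbcUtils.importData(dataFile, sourceDataSource,
     *           "INSERT INTO account (id, first_name, last_name) VALUES (?, ?, ?)",
     *           line -> {
     *               final String[] fields = line.split("\\|");
     *               return fields.length == 3 ? new Object[] { Long.valueOf(fields[0]), fields[1], fields[2] } : null;
     *           });
     */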
    /**
     * Imports the data from a file to the database using the provided {@code Connection}.
     *
     * @param <E> the type of exception that might be thrown
     * @param file the file containing the data to be imported
     * @param conn the Connection to the database
     * @param insertSQL the SQL insert statement
     * @param batchSize the number of rows to be inserted in each batch
     * @param batchIntervalInMillis the interval in milliseconds between each batch
     * @param func a function to process each line of the file and convert it to an array of objects for record insert.
     *        Returns a {@code null} array to skip the line.
     * @return the number of rows affected
     * @throws SQLException if a database access error occurs
     * @throws IOException if an I/O error occurs
     * @throws E if the function throws an exception
     */
    public static <E extends Exception> long importData(final File file, final Connection conn, final String insertSQL, final int batchSize,
            final long batchIntervalInMillis, final Throwables.Function<? super String, Object[], E> func) throws SQLException, IOException, E {
        try (PreparedStatement stmt = JdbcUtil.prepareStatement(conn, insertSQL)) {
            return importData(file, stmt, batchSize, batchIntervalInMillis, func);
        }
    }

    /**
     * Imports the data from a file to the database using the provided {@code PreparedStatement}.
     *
     * @param <E> the type of exception that might be thrown
     * @param file the file containing the data to be imported
     * @param stmt the PreparedStatement to be used for the import
     * @param batchSize the number of rows to be inserted in each batch
     * @param batchIntervalInMillis the interval in milliseconds between each batch
     * @param func a function to process each line of the file and convert it to an array of objects for record insert.
     *        Returns a {@code null} array to skip the line.
     * @return the number of rows affected
     * @throws SQLException if a database access error occurs
     * @throws IOException if an I/O error occurs
     * @throws E if the function throws an exception
     */
    public static <E extends Exception> long importData(final File file, final PreparedStatement stmt, final int batchSize,
            final long batchIntervalInMillis, final Throwables.Function<? super String, Object[], E> func) throws SQLException, IOException, E {
        try (Reader reader = IOUtil.newFileReader(file)) {
            return importData(reader, stmt, batchSize, batchIntervalInMillis, func);
        }
    }

    /**
     * Imports the data from a {@code Reader} to the database.
     *
     * @param <E> the type of exception that might be thrown
     * @param reader the Reader containing the data to be imported
     * @param sourceDataSource the DataSource to get the database connection
     * @param insertSQL the SQL insert statement
     * @param func a function to process each line of the input and convert it to an array of objects for record insert.
     *        Returns a {@code null} array to skip the line.
     * @return the number of rows affected
     * @throws SQLException if a database access error occurs
     * @throws IOException if an I/O error occurs
     * @throws E if the function throws an exception
     */
    public static <E extends Exception> long importData(final Reader reader, final javax.sql.DataSource sourceDataSource, final String insertSQL,
            final Throwables.Function<? super String, Object[], E> func) throws SQLException, IOException, E {
        final Connection conn = sourceDataSource.getConnection();

        try {
            return importData(reader, conn, insertSQL, JdbcUtil.DEFAULT_BATCH_SIZE, 0, func);
        } finally {
            JdbcUtil.releaseConnection(conn, sourceDataSource);
        }
    }
    /**
     * Imports the data from a {@code Reader} to the database using the provided {@code Connection}.
     *
     * @param <E> the type of exception that might be thrown
     * @param reader the Reader containing the data to be imported
     * @param conn the Connection to the database
     * @param insertSQL the SQL insert statement
     * @param batchSize the number of rows to be inserted in each batch
     * @param batchIntervalInMillis the interval in milliseconds between each batch
     * @param func a function to process each line of the input and convert it to an array of objects for record insert.
     *        Returns a {@code null} array to skip the line.
     * @return the number of rows affected
     * @throws SQLException if a database access error occurs
     * @throws IOException if an I/O error occurs
     * @throws E if the function throws an exception
     */
    public static <E extends Exception> long importData(final Reader reader, final Connection conn, final String insertSQL, final int batchSize,
            final long batchIntervalInMillis, final Throwables.Function<? super String, Object[], E> func) throws SQLException, IOException, E {
        try (PreparedStatement stmt = JdbcUtil.prepareStatement(conn, insertSQL)) {
            return importData(reader, stmt, batchSize, batchIntervalInMillis, func);
        }
    }

    /**
     * Imports the data from a {@code Reader} to the database using the provided {@code PreparedStatement}.
     *
     * @param <E> the type of exception that might be thrown
     * @param reader the Reader containing the data to be imported
     * @param stmt the PreparedStatement to be used for the import
     * @param batchSize the number of rows to be inserted in each batch
     * @param batchIntervalInMillis the interval in milliseconds between each batch
     * @param func a function to process each line of the input and convert it to an array of objects for record insert.
     *        Returns a {@code null} array to skip the line.
     * @return the number of rows affected
     * @throws IllegalArgumentException if {@code batchSize} is not greater than 0 or {@code batchIntervalInMillis} is negative
     * @throws SQLException if a database access error occurs
     * @throws IOException if an I/O error occurs
     * @throws E if the function throws an exception
     */
    public static <E extends Exception> long importData(final Reader reader, final PreparedStatement stmt, final int batchSize,
            final long batchIntervalInMillis, final Throwables.Function<? super String, Object[], E> func)
            throws IllegalArgumentException, SQLException, IOException, E {
        N.checkArgument(batchSize > 0 && batchIntervalInMillis >= 0, "'batchSize'=%s must be greater than 0 and 'batchIntervalInMillis'=%s can't be negative",
                batchSize, batchIntervalInMillis);

        long result = 0;
        final BufferedReader br = Objectory.createBufferedReader(reader);

        try {
            String line = null;
            Object[] row = null;

            while ((line = br.readLine()) != null) {
                row = func.apply(line);

                if (row == null) {
                    continue;
                }

                for (int i = 0, len = row.length; i < len; i++) {
                    stmt.setObject(i + 1, row[i]);
                }

                stmt.addBatch();

                if ((++result % batchSize) == 0) {
                    JdbcUtil.executeBatch(stmt);

                    if (batchIntervalInMillis > 0) {
                        N.sleep(batchIntervalInMillis);
                    }
                }
            }

            if ((result % batchSize) > 0) {
                JdbcUtil.executeBatch(stmt);
            }
        } finally {
            Objectory.recycle(br);
        }

        return result;
    }

    /**
     * Imports the data from an {@code Iterator} to the database.
     *
     * @param <T> the type of elements in the iterator
     * @param iter the Iterator containing the data to be imported
     * @param sourceDataSource the DataSource to get the database connection
     * @param insertSQL the SQL insert statement
     * @param stmtSetter a BiConsumer to set the parameters of the PreparedStatement
     * @return the number of rows affected
     * @throws SQLException if a database access error occurs
     */
    public static <T> long importData(final Iterator<? extends T> iter, final javax.sql.DataSource sourceDataSource, final String insertSQL,
            final Throwables.BiConsumer<? super PreparedQuery, ? super T, SQLException> stmtSetter) throws SQLException {
        final Connection conn = sourceDataSource.getConnection();

        try (PreparedStatement stmt = JdbcUtil.prepareStatement(conn, insertSQL)) {
            return importData(iter, stmt, JdbcUtil.DEFAULT_BATCH_SIZE, 0, stmtSetter);
        } finally {
            JdbcUtil.releaseConnection(conn, sourceDataSource);
        }
    }
    /**
     * Imports the data from an {@code Iterator} to the database using the provided {@code Connection}.
     *
     * @param <T> the type of elements in the iterator
     * @param iter the Iterator containing the data to be imported
     * @param conn the Connection to the database
     * @param insertSQL the SQL insert statement
     * @param batchSize the number of rows to be inserted in each batch
     * @param batchIntervalInMillis the interval in milliseconds between each batch
     * @param stmtSetter a BiConsumer to set the parameters of the PreparedStatement
     * @return the number of rows affected
     * @throws SQLException if a database access error occurs
     */
    public static <T> long importData(final Iterator<? extends T> iter, final Connection conn, final String insertSQL, final int batchSize,
            final long batchIntervalInMillis, final Throwables.BiConsumer<? super PreparedQuery, ? super T, SQLException> stmtSetter) throws SQLException {
        try (PreparedStatement stmt = JdbcUtil.prepareStatement(conn, insertSQL)) {
            return importData(iter, stmt, batchSize, batchIntervalInMillis, stmtSetter);
        }
    }

    /**
     * Imports the data from an {@code Iterator} to the database using the provided {@code PreparedStatement}.
     *
     * @param <T> the type of elements in the iterator
     * @param iter the Iterator containing the data to be imported
     * @param stmt the PreparedStatement to be used for the import
     * @param batchSize the number of rows to be inserted in each batch
     * @param batchIntervalInMillis the interval in milliseconds between each batch
     * @param stmtSetter a BiConsumer to set the parameters of the PreparedStatement
     * @return the number of rows affected
     * @throws SQLException if a database access error occurs
     */
    public static <T> long importData(final Iterator<? extends T> iter, final PreparedStatement stmt, final int batchSize,
            final long batchIntervalInMillis, final Throwables.BiConsumer<? super PreparedQuery, ? super T, SQLException> stmtSetter) throws SQLException {
        N.checkArgument(batchSize > 0 && batchIntervalInMillis >= 0, "'batchSize'=%s must be greater than 0 and 'batchIntervalInMillis'=%s can't be negative",
                batchSize, batchIntervalInMillis);

        final PreparedQuery stmtForSetter = new PreparedQuery(stmt);
        long result = 0;
        T next = null;

        while (iter.hasNext()) {
            next = iter.next();
            stmtSetter.accept(stmtForSetter, next);
            stmtForSetter.addBatch();

            if ((++result % batchSize) == 0) {
                JdbcUtil.executeBatch(stmt);

                if (batchIntervalInMillis > 0) {
                    N.sleep(batchIntervalInMillis);
                }
            }
        }

        if ((result % batchSize) > 0) {
            JdbcUtil.executeBatch(stmt);
        }

        return result;
    }
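    /*
     * An iterator-based import sketch (illustrative; Account is a hypothetical entity and
     * the setter chains PreparedQuery's fluent parameter methods):
     *
     *   Iterator<Account> iter = accounts.iterator();
     *
     *   long inserted = JdbcUtils.importData(iter, sourceDataSource,
     *           "INSERT INTO account (id, first_name) VALUES (?, ?)",
     *           (q, a) -> q.setLong(1, a.getId()).setString(2, a.getFirstName()));
     */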
* @param stmtSetter * @return * @throws SQLException * @throws IOException */ public static long importCSV(final File file, final javax.sql.DataSource sourceDataSource, final String insertSQL, final Throwables.BiConsumer stmtSetter) throws SQLException, IOException { final Connection conn = sourceDataSource.getConnection(); try (PreparedStatement stmt = JdbcUtil.prepareStatement(conn, insertSQL)) { return importCSV(file, stmt, stmtSetter); } finally { JdbcUtil.releaseConnection(conn, sourceDataSource); } } /** * Imports the data from a CSV file to the database using the provided connection. *

* This method reads data from the specified CSV file and inserts it into the database * using the provided connection and SQL insert statement. The column order in the SQL * statement should be consistent with the column order in the CSV file. *

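     * A minimal usage sketch (the {@code dataSource}, the {@code account} table and the CSV
     * file name are illustrative assumptions, not part of this API):
     * <pre>
     * String insertSQL = "INSERT INTO account (first_name, last_name) VALUES (?, ?)";
     *
     * try (Connection conn = dataSource.getConnection()) {
     *     // each parsed CSV row arrives in the setter as a String[]
     *     long inserted = JdbcUtils.importCSV(new File("account.csv"), conn, insertSQL, 200, 0, (q, row) -> {
     *         q.setString(1, row[0]);
     *         q.setString(2, row[1]);
     *     });
     * }
     * </pre>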
* * @param file the CSV file containing the data to be imported * @param conn the database connection to be used * @param insertSQL the SQL insert statement * @param batchSize the number of rows to be batched together for insertion * @param batchIntervalInMillis the interval in milliseconds between batch insertions * @param stmtSetter a BiConsumer to set the parameters of the PreparedStatement * @return the number of rows affected * @throws SQLException if a database access error occurs * @throws IOException if an I/O error occurs */ public static long importCSV(final File file, final Connection conn, final String insertSQL, final int batchSize, final long batchIntervalInMillis, final Throwables.BiConsumer stmtSetter) throws SQLException, IOException { try (PreparedStatement stmt = JdbcUtil.prepareStatement(conn, insertSQL)) { return importCSV(file, stmt, batchSize, batchIntervalInMillis, stmtSetter); } } /** * Imports the data from a CSV file to the database using the provided PreparedStatement. *

* This method reads data from the specified CSV file and inserts it into the database * using the provided PreparedStatement. The column order in the SQL statement should * be consistent with the column order in the CSV file. *

* * @param file the CSV file containing the data to be imported * @param stmt the PreparedStatement to be used for the insertion * @param stmtSetter a BiConsumer to set the parameters of the PreparedStatement * @return the number of rows affected * @throws SQLException if a database access error occurs * @throws IOException if an I/O error occurs */ public static long importCSV(final File file, final PreparedStatement stmt, final Throwables.BiConsumer stmtSetter) throws SQLException, IOException { return importCSV(file, stmt, JdbcUtil.DEFAULT_BATCH_SIZE, 0, stmtSetter); } /** * Imports the data from a CSV file to the database using the provided PreparedStatement. *

* This method reads data from the specified CSV file and inserts it into the database * using the provided PreparedStatement. The column order in the SQL statement should * be consistent with the column order in the CSV file. *

     *
     * @param file the CSV file containing the data to be imported
     * @param stmt the PreparedStatement to be used for the insertion
     * @param batchSize the number of rows to be batched together for insertion
     * @param batchIntervalInMillis the interval in milliseconds between batch insertions
     * @param stmtSetter a BiConsumer to set the parameters of the PreparedStatement
     * @return the number of rows affected
     * @throws SQLException if a database access error occurs
     * @throws IOException if an I/O error occurs
     */
    public static long importCSV(final File file, final PreparedStatement stmt, final int batchSize, final long batchIntervalInMillis,
            final Throwables.BiConsumer<? super PreparedQuery, ? super String[], SQLException> stmtSetter) throws SQLException, IOException {
        return importCSV(file, Fn.<String[]> alwaysTrue(), stmt, batchSize, batchIntervalInMillis, stmtSetter);
    }

    /**
     * Imports the data from a CSV file to the database using the provided PreparedStatement.
     *

* This method reads data from the specified CSV file and inserts it into the database * using the provided PreparedStatement. The column order in the SQL statement should * be consistent with the column order in the CSV file. *

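     * For example, a sketch that skips rows with an empty first column (the table, file name
     * and prepared {@code stmt} are assumed for illustration):
     * <pre>
     * long inserted = JdbcUtils.importCSV(new File("account.csv"),
     *         row -> Strings.isNotEmpty(row[0]), // filter over the parsed String[] row
     *         stmt, 200, 0, (q, row) -> {
     *             q.setString(1, row[0]);
     *             q.setString(2, row[1]);
     *         });
     * </pre>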
     *
     * @param <E> the type of exception that may be thrown by the filter
     * @param file the CSV file containing the data to be imported
     * @param filter a predicate to filter the rows to be imported
     * @param stmt the PreparedStatement to be used for the insertion
     * @param batchSize the number of rows to be batched together for insertion
     * @param batchIntervalInMillis the interval in milliseconds between batch insertions
     * @param stmtSetter a BiConsumer to set the parameters of the PreparedStatement
     * @return the number of rows affected
     * @throws SQLException if a database access error occurs
     * @throws IOException if an I/O error occurs
     * @throws E if the filter throws an exception
     */
    public static <E extends Exception> long importCSV(final File file, final Throwables.Predicate<? super String[], E> filter,
            final PreparedStatement stmt, final int batchSize, final long batchIntervalInMillis,
            final Throwables.BiConsumer<? super PreparedQuery, ? super String[], SQLException> stmtSetter) throws SQLException, IOException, E {
        try (Reader reader = IOUtil.newFileReader(file)) {
            return importCSV(reader, filter, stmt, batchSize, batchIntervalInMillis, stmtSetter);
        }
    }

    /**
     * Imports the data from a CSV file to the database using the provided DataSource and SQL insert statement.
     *

* This method reads data from the specified CSV file and inserts it into the database * using the provided DataSource and SQL insert statement. The column order in the SQL statement should * be consistent with the column order in the CSV file. *

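     * For example, importing in-memory CSV content (the data source and table are assumed):
     * <pre>
     * Reader reader = new StringReader("first_name,last_name\nJohn,Doe");
     *
     * long inserted = JdbcUtils.importCSV(reader, dataSource,
     *         "INSERT INTO account (first_name, last_name) VALUES (?, ?)", (q, row) -> {
     *             q.setString(1, row[0]);
     *             q.setString(2, row[1]);
     *         });
     * </pre>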
* * @param reader the Reader to read the CSV file * @param sourceDataSource the DataSource to obtain the database connection * @param insertSQL the SQL insert statement to be used for the insertion * @param stmtSetter a BiConsumer to set the parameters of the PreparedStatement * @return the number of rows affected * @throws SQLException if a database access error occurs * @throws IOException if an I/O error occurs */ public static long importCSV(final Reader reader, final javax.sql.DataSource sourceDataSource, final String insertSQL, final Throwables.BiConsumer stmtSetter) throws SQLException, IOException { final Connection conn = sourceDataSource.getConnection(); try (PreparedStatement stmt = JdbcUtil.prepareStatement(conn, insertSQL)) { return importCSV(reader, stmt, stmtSetter); } finally { JdbcUtil.releaseConnection(conn, sourceDataSource); } } /** * Imports the data from a CSV file to the database using the provided PreparedStatement. *

* This method reads data from the specified CSV file and inserts it into the database * using the provided PreparedStatement. The column order in the SQL statement should * be consistent with the column order in the CSV file. *

* * @param reader the Reader to read the CSV file * @param stmt the PreparedStatement to be used for the insertion * @param stmtSetter a BiConsumer to set the parameters of the PreparedStatement * @return the number of rows affected * @throws SQLException if a database access error occurs * @throws IOException if an I/O error occurs */ public static long importCSV(final Reader reader, final PreparedStatement stmt, final Throwables.BiConsumer stmtSetter) throws SQLException, IOException { return importCSV(reader, stmt, JdbcUtil.DEFAULT_BATCH_SIZE, 0, stmtSetter); } /** * Imports the data from a CSV file to the database using the provided PreparedStatement. *

* This method reads data from the specified CSV file and inserts it into the database * using the provided PreparedStatement. The column order in the SQL statement should * be consistent with the column order in the CSV file. *

     *
     * @param reader the Reader to read the CSV file
     * @param stmt the PreparedStatement to be used for the insertion
     * @param batchSize the number of rows to be batched together for insertion
     * @param batchIntervalInMillis the interval in milliseconds between batch insertions
     * @param stmtSetter a BiConsumer to set the parameters of the PreparedStatement
     * @return the number of rows affected
     * @throws SQLException if a database access error occurs
     * @throws IOException if an I/O error occurs
     */
    @SuppressWarnings({ "unchecked" })
    public static long importCSV(final Reader reader, final PreparedStatement stmt, final int batchSize, final long batchIntervalInMillis,
            final Throwables.BiConsumer<? super PreparedQuery, ? super String[], SQLException> stmtSetter) throws SQLException, IOException {
        return importCSV(reader, Fn.<String[]> alwaysTrue(), stmt, batchSize, batchIntervalInMillis, stmtSetter);
    }

    /**
     * Imports the data from a CSV file to the database using the provided PreparedStatement.
     *

* This method reads data from the specified CSV file and inserts it into the database * using the provided PreparedStatement. The column order in the SQL statement should * be consistent with the column order in the CSV file. *

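     * The first line of the input is consumed as the CSV header; each following line is parsed
     * into a {@code String[]}, passed through the optional {@code filter} and bound to the
     * statement by {@code stmtSetter}. A rough sketch (the open {@code conn}, table and file
     * are assumptions for illustration):
     * <pre>
     * try (Reader reader = IOUtil.newFileReader(new File("account.csv"));
     *         PreparedStatement stmt = conn.prepareStatement("INSERT INTO account (first_name, last_name) VALUES (?, ?)")) {
     *     long inserted = JdbcUtils.importCSV(reader, row -> row.length >= 2, stmt, 500, 0, (q, row) -> {
     *         q.setString(1, row[0]);
     *         q.setString(2, row[1]);
     *     });
     * }
     * </pre>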
     *
     * @param <E> the type of exception that may be thrown by the filter
     * @param reader the Reader to read the CSV file
     * @param filter a Predicate to filter the rows to be inserted
     * @param stmt the PreparedStatement to be used for the insertion
     * @param batchSize the number of rows to be batched together for insertion
     * @param batchIntervalInMillis the interval in milliseconds between batch insertions
     * @param stmtSetter a BiConsumer to set the parameters of the PreparedStatement
     * @return the number of rows affected
     * @throws IllegalArgumentException if the batch size or batch interval is invalid
     * @throws SQLException if a database access error occurs
     * @throws IOException if an I/O error occurs
     * @throws E if the filter throws an exception
     */
    @SuppressWarnings({ "unchecked", "resource" })
    public static <E extends Exception> long importCSV(final Reader reader, final Throwables.Predicate<? super String[], E> filter,
            final PreparedStatement stmt, final int batchSize, final long batchIntervalInMillis,
            final Throwables.BiConsumer<? super PreparedQuery, ? super String[], SQLException> stmtSetter)
            throws IllegalArgumentException, SQLException, IOException, E {
        N.checkArgument(batchSize > 0 && batchIntervalInMillis >= 0, "'batchSize'=%s must be greater than 0 and 'batchIntervalInMillis'=%s can't be negative",
                batchSize, batchIntervalInMillis);

        final PreparedQuery stmtForSetter = new PreparedQuery(stmt);
        final Function<String, String[]> headerParser = CSVUtil.getCurrentHeaderParser();
        final BiConsumer<String, String[]> lineParser = CSVUtil.getCurrentLineParser();
        long result = 0;
        final BufferedReader br = Objectory.createBufferedReader(reader);

        try {
            String line = br.readLine();
            final String[] titles = headerParser.apply(line);
            final String[] output = new String[titles.length];

            while ((line = br.readLine()) != null) {
                lineParser.accept(line, output);

                if (filter != null && !filter.test(output)) {
                    continue;
                }

                stmtSetter.accept(stmtForSetter, output);
                stmtForSetter.addBatch();

                if ((++result % batchSize) == 0) {
                    JdbcUtil.executeBatch(stmt);

                    if (batchIntervalInMillis > 0) {
                        N.sleep(batchIntervalInMillis);
                    }
                }

                N.fill(output, null);
            }

            if ((result % batchSize) > 0) {
                JdbcUtil.executeBatch(stmt);
            }
        } finally {
            Objectory.recycle(br);
        }

        return result;
    }

    /**
     * Exports the data from the database to a CSV file.
     *

     * Each line in the output file/Writer is written as a JSON-style array of values without the enclosing brackets.
     * The first line of the CSV file will contain the column names as the title.
     *

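     * For example (the data source and the {@code account} table are assumed):
     * <pre>
     * long exported = JdbcUtils.exportCSV(new File("account.csv"), dataSource,
     *         "SELECT first_name, last_name FROM account");
     * </pre>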
* * @param out the File to write the CSV data to * @param sourceDataSource the DataSource to get the database connection from * @param querySQL the SQL query to execute to retrieve the data * @return the number of rows exported * @throws SQLException if a database access error occurs * @throws IOException if an I/O error occurs */ public static long exportCSV(final File out, final javax.sql.DataSource sourceDataSource, final String querySQL) throws SQLException, IOException { final Connection conn = sourceDataSource.getConnection(); try { return exportCSV(out, conn, querySQL); } finally { JdbcUtil.releaseConnection(conn, sourceDataSource); } } /** * Exports the data from the database to a CSV file. *

     * Each line in the output file/Writer is written as a JSON-style array of values without the enclosing brackets.
     * The first line of the CSV file will contain the column names as the title.
     *

* * @param out the File to write the CSV data to * @param conn the Connection to the database * @param querySQL the SQL query to execute to retrieve the data * @return the number of rows exported * @throws SQLException if a database access error occurs * @throws IOException if an I/O error occurs */ public static long exportCSV(final File out, final Connection conn, final String querySQL) throws SQLException, IOException { return exportCSV(out, conn, querySQL, true, true); } /** * Exports the data from the database to a CSV file. *

     * Each line in the output file/Writer is written as a JSON-style array of values without the enclosing brackets.
     * The first line of the CSV file will contain the column names as the title if {@code writeTitle} is {@code true}.
     * Each value will be quoted if {@code quoted} is {@code true}.
     *

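     * For example, exporting without a header line and without quoting (the connection and
     * table are assumed):
     * <pre>
     * long exported = JdbcUtils.exportCSV(new File("account.csv"), conn,
     *         "SELECT first_name, last_name FROM account", false, false);
     * </pre>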
* * @param out the File to write the CSV data to * @param conn the Connection to the database * @param querySQL the SQL query to execute to retrieve the data * @param writeTitle whether to write the column names as the first line * @param quoted whether to quote each value in the CSV file * @return the number of rows exported * @throws SQLException if a database access error occurs * @throws IOException if an I/O error occurs */ public static long exportCSV(final File out, final Connection conn, final String querySQL, final boolean writeTitle, final boolean quoted) throws SQLException, IOException { return exportCSV(out, conn, querySQL, null, writeTitle, quoted); } /** * Exports the data from the database to a CSV file. *

     * Each line in the output file/Writer is written as a JSON-style array of values without the enclosing brackets.
     * The first line of the CSV file will contain the column names as the title if {@code writeTitle} is {@code true}.
     * Each value will be quoted if {@code quoted} is {@code true}.
     *

     *
     * @param out the File to write the CSV data to
     * @param conn the Connection to the database
     * @param querySQL the SQL query to execute to retrieve the data
     * @param selectColumnNames the collection of column names to be selected
     * @param writeTitle whether to write the column names as the first line
     * @param quoted whether to quote each value in the CSV file
     * @return the number of rows exported
     * @throws SQLException if a database access error occurs
     * @throws IOException if an I/O error occurs
     */
    public static long exportCSV(final File out, final Connection conn, final String querySQL, final Collection<String> selectColumnNames,
            final boolean writeTitle, final boolean quoted) throws SQLException, IOException {
        final ParsedSql sql = ParsedSql.parse(querySQL);

        try (PreparedStatement stmt = JdbcUtil.prepareStatement(conn, sql.getParameterizedSql(), ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY)) {
            setFetchForBigResult(conn, stmt);

            return exportCSV(out, stmt, selectColumnNames, writeTitle, quoted);
        }
    }

    /**
     * Exports the data from the database to a CSV file.
     *

     * Each line in the output file/Writer is written as a JSON-style array of values without the enclosing brackets.
     * The first line of the CSV file will contain the column names as the title, and each value will be quoted.
     *

* * @param out the File to write the CSV data to * @param stmt the PreparedStatement to execute to retrieve the data * @return the number of rows exported * @throws SQLException if a database access error occurs * @throws IOException if an I/O error occurs */ @SuppressWarnings("unchecked") public static long exportCSV(final File out, final PreparedStatement stmt) throws SQLException, IOException { return exportCSV(out, stmt, true, true); } /** * Exports the data from the database to a CSV file. *

     * Each line in the output file/Writer is written as a JSON-style array of values without the enclosing brackets.
     * The first line of the CSV file will contain the column names as the title if {@code writeTitle} is {@code true}.
     * Each value will be quoted if {@code quoted} is {@code true}.
     *

* * @param out the File to write the CSV data to * @param stmt the PreparedStatement to execute to retrieve the data * @param writeTitle whether to write the column names as the first line * @param quoted whether to quote each value in the CSV file * @return the number of rows exported * @throws SQLException if a database access error occurs * @throws IOException if an I/O error occurs */ public static long exportCSV(final File out, final PreparedStatement stmt, final boolean writeTitle, final boolean quoted) throws SQLException, IOException { return exportCSV(out, stmt, null, writeTitle, quoted); } /** * Exports the data from the database to a CSV file. *

     * Each line in the output file/Writer is written as a JSON-style array of values without the enclosing brackets.
     * The first line of the CSV file will contain the column names as the title if {@code writeTitle} is {@code true}.
     * Each value will be quoted if {@code quoted} is {@code true}.
     *

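     * For example, exporting only a subset of the queried columns (the prepared {@code stmt}
     * and column names are assumed):
     * <pre>
     * long exported = JdbcUtils.exportCSV(new File("account.csv"), stmt,
     *         N.asList("first_name", "last_name"), true, true);
     * </pre>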
* * @param out the File to write the CSV data to * @param stmt the PreparedStatement to execute to retrieve the data * @param selectColumnNames the collection of column names to be selected * @param writeTitle whether to write the column names as the first line * @param quoted whether to quote each value in the CSV file * @return the number of rows exported * @throws SQLException if a database access error occurs * @throws IOException if an I/O error occurs */ public static long exportCSV(final File out, final PreparedStatement stmt, final Collection selectColumnNames, final boolean writeTitle, final boolean quoted) throws SQLException, IOException { ResultSet rs = null; try { rs = JdbcUtil.executeQuery(stmt); // rs.setFetchSize(DEFAULT_FETCH_SIZE); return exportCSV(out, rs, selectColumnNames, writeTitle, quoted); } finally { JdbcUtil.closeQuietly(rs); } } /** * Exports the data from the ResultSet to a CSV file. *

     * Each line in the output file/Writer is written as a JSON-style array of values without the enclosing brackets.
     * The first line of the CSV file will contain the column names as the title, and each value will be quoted.
     *

* * @param out the File to write the CSV data to * @param rs the ResultSet containing the data to be exported * @return the number of rows exported * @throws SQLException if a database access error occurs * @throws IOException if an I/O error occurs */ public static long exportCSV(final File out, final ResultSet rs) throws SQLException, IOException { return exportCSV(out, rs, true, true); } /** * Exports the data from the ResultSet to a CSV file. *

     * Each line in the output file/Writer is written as a JSON-style array of values without the enclosing brackets.
     * The first line of the CSV file will contain the column names as the title if {@code writeTitle} is {@code true}.
     * Each value will be quoted if {@code quoted} is {@code true}.
     *

* * @param out the File to write the CSV data to * @param rs the ResultSet containing the data to be exported * @param writeTitle whether to write the column names as the first line * @param quoted whether to quote each value in the CSV file * @return the number of rows exported * @throws SQLException if a database access error occurs * @throws IOException if an I/O error occurs */ public static long exportCSV(final File out, final ResultSet rs, final boolean writeTitle, final boolean quoted) throws SQLException, IOException { return exportCSV(out, rs, null, writeTitle, quoted); } /** * Exports the data from the ResultSet to a CSV file. *

     * Each line in the output file/Writer is written as a JSON-style array of values without the enclosing brackets.
     * The first line of the CSV file will contain the column names as the title if {@code writeTitle} is {@code true}.
     * Each value will be quoted if {@code quoted} is {@code true}.
     *

* * @param out the File to write the CSV data to * @param rs the ResultSet containing the data to be exported * @param selectColumnNames the collection of column names to be selected * @param writeTitle whether to write the column names as the first line * @param quoted whether to quote each value in the CSV file * @return the number of rows exported * @throws SQLException if a database access error occurs * @throws IOException if an I/O error occurs */ public static long exportCSV(final File out, final ResultSet rs, final Collection selectColumnNames, final boolean writeTitle, final boolean quoted) throws SQLException, IOException { if (!out.exists()) { out.createNewFile(); //NOSONAR } try (Writer writer = IOUtil.newFileWriter(out)) { return exportCSV(writer, rs, selectColumnNames, writeTitle, quoted); } } /** * Exports the data from the database to a CSV file. *

     * Each line in the output file/Writer is written as a JSON-style array of values without the enclosing brackets.
     * The first line of the CSV file will contain the column names as the title, and each value will be quoted.
     *

* * @param out the Writer to write the CSV data to * @param sourceDataSource the DataSource to get the database connection from * @param querySQL the SQL query to execute to retrieve the data * @return the number of rows exported * @throws SQLException if a database access error occurs * @throws IOException if an I/O error occurs */ public static long exportCSV(final Writer out, final javax.sql.DataSource sourceDataSource, final String querySQL) throws SQLException, IOException { final Connection conn = sourceDataSource.getConnection(); try { return exportCSV(out, conn, querySQL); } finally { JdbcUtil.releaseConnection(conn, sourceDataSource); } } /** * Exports the data from the database to a CSV file. *

     * Each line in the output file/Writer is written as a JSON-style array of values without the enclosing brackets.
     * The first line of the CSV file will contain the column names as the title, and each value will be quoted.
     *

     *
     * @param out the Writer to write the CSV data to
     * @param conn the Connection to the database
     * @param querySQL the SQL query to execute to retrieve the data
     * @return the number of rows exported
     * @throws SQLException if a database access error occurs
     * @throws IOException if an I/O error occurs
     */
    public static long exportCSV(final Writer out, final Connection conn, final String querySQL) throws SQLException, IOException {
        final ParsedSql sql = ParsedSql.parse(querySQL);

        try (PreparedStatement stmt = JdbcUtil.prepareStatement(conn, sql.getParameterizedSql(), ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY)) {
            // The fetch size must be configured before the query is executed;
            // setting it afterwards has no effect on the ResultSet being produced.
            setFetchForBigResult(conn, stmt);

            try (ResultSet rs = JdbcUtil.executeQuery(stmt)) {
                return exportCSV(out, rs);
            }
        }
    }

    /**
     * Exports the data from the ResultSet to a CSV file.
     *

     * Each line in the output file/Writer is written as a JSON-style array of values without the enclosing brackets.
     * The first line of the CSV file will contain the column names as the title, and each value will be quoted.
     *

* * @param out the Writer to write the CSV data to * @param rs the ResultSet containing the data to be exported * @return the number of rows exported * @throws SQLException if a database access error occurs * @throws IOException if an I/O error occurs */ public static long exportCSV(final Writer out, final ResultSet rs) throws SQLException, IOException { return exportCSV(out, rs, true, true); } /** * Exports the data from the ResultSet to a CSV file. *

     * Each line in the output file/Writer is written as a JSON-style array of values without the enclosing brackets.
     * The first line of the CSV file will contain the column names as the title if {@code writeTitle} is {@code true},
     * and each value will be quoted if {@code quoted} is {@code true}.
     *

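     * For example, capturing the CSV output in memory (the open {@code ResultSet} is assumed):
     * <pre>
     * StringWriter out = new StringWriter();
     * long exported = JdbcUtils.exportCSV(out, rs, true, true);
     * String csv = out.toString();
     * </pre>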
* * @param out the Writer to write the CSV data to * @param rs the ResultSet containing the data to be exported * @param writeTitle whether to write the column names as the first line of the CSV file * @param quoted whether to quote each value in the CSV file * @return the number of rows exported * @throws SQLException if a database access error occurs * @throws IOException if an I/O error occurs */ public static long exportCSV(final Writer out, final ResultSet rs, final boolean writeTitle, final boolean quoted) throws SQLException, IOException { return exportCSV(out, rs, null, writeTitle, quoted); } /** * Exports the data from the ResultSet to a CSV file. *

     * Each line in the output file/Writer is written as a JSON-style array of values without the enclosing brackets.
     * The first line of the CSV file will contain the column names as the title if {@code writeTitle} is {@code true},
     * and each value will be quoted if {@code quoted} is {@code true}.
     *

* * @param out the Writer to write the CSV data to * @param rs the ResultSet containing the data to be exported * @param selectColumnNames the collection of column names to be selected for export * @param writeTitle whether to write the column names as the first line of the CSV file * @param quoted whether to quote each value in the CSV file * @return the number of rows exported * @throws IllegalArgumentException if an argument is invalid * @throws SQLException if a database access error occurs * @throws IOException if an I/O error occurs */ @SuppressWarnings("deprecation") public static long exportCSV(final Writer out, final ResultSet rs, final Collection selectColumnNames, final boolean writeTitle, final boolean quoted) throws IllegalArgumentException, SQLException, IOException { // N.checkArgument(offset >= 0 && count >= 0, "'offset'=%s and 'count'=%s can't be negative"); final JSONSerializationConfig config = JSC.create(); config.setDateTimeFormat(DateTimeFormat.ISO_8601_TIMESTAMP); if (quoted) { config.setCharQuotation(WD._QUOTATION_D); config.setStringQuotation(WD._QUOTATION_D); } else { config.setCharQuotation((char) 0); config.setStringQuotation((char) 0); } long result = 0; final Type strType = N.typeOf(String.class); final boolean isBufferedJSONWriter = out instanceof BufferedJSONWriter; final BufferedJSONWriter bw = isBufferedJSONWriter ? (BufferedJSONWriter) out : Objectory.createBufferedJSONWriter(out); try { final boolean checkDateType = JdbcUtil.checkDateType(rs); final ResultSetMetaData rsmd = rs.getMetaData(); final int columnCount = rsmd.getColumnCount(); final String[] columnNames = new String[columnCount]; final Set columnNameSet = selectColumnNames == null ? null : N.newHashSet(selectColumnNames); String label = null; for (int i = 0; i < columnCount; i++) { label = JdbcUtil.getColumnLabel(rsmd, i + 1); if (columnNameSet == null || columnNameSet.remove(label)) { columnNames[i] = label; } } if (columnNameSet != null && columnNameSet.size() > 0) { throw new IllegalArgumentException(columnNameSet + " are not included in query result"); } if (writeTitle) { for (int i = 0, j = 0, len = columnNames.length; i < len; i++) { if (columnNames[i] == null) { continue; } if (j++ > 0) { bw.write(ELEMENT_SEPARATOR_CHAR_ARRAY); } if (quoted) { bw.write(WD._QUOTATION_D); bw.write(columnNames[i]); bw.write(WD._QUOTATION_D); } else { bw.write(columnNames[i]); } } bw.write(IOUtil.LINE_SEPARATOR); } final Type[] typeArray = new Type[columnCount]; Type type = null; Object value = null; while (rs.next()) { if (result++ > 0) { bw.write(IOUtil.LINE_SEPARATOR); } for (int i = 0, j = 0; i < columnCount; i++) { if (columnNames[i] == null) { continue; } if (j++ > 0) { bw.write(ELEMENT_SEPARATOR_CHAR_ARRAY); } type = typeArray[i]; if (type == null) { value = JdbcUtil.getColumnValue(rs, i + 1, checkDateType); if (value == null) { bw.write(NULL_CHAR_ARRAY); } else { type = N.typeOf(value.getClass()); typeArray[i] = type; if (type.isSerializable()) { type.writeCharacter(bw, value, config); } else { type.writeCharacter(bw, CSVUtil.jsonParser.serialize(value, config), config); } } } else { if (type.isSerializable()) { type.writeCharacter(bw, type.get(rs, i + 1), config); } else { strType.writeCharacter(bw, CSVUtil.jsonParser.serialize(type.get(rs, i + 1), config), config); } } } } bw.flush(); } finally { if (!isBufferedJSONWriter) { Objectory.recycle(bw); } } return result; } private static final Supplier> supplierOfStmtSetterByRS = () -> new Throwables.BiConsumer<>() { private int columnCount = 0; 
@Override public void accept(final PreparedQuery stmt, final ResultSet rs) throws SQLException { if (columnCount == 0) { columnCount = rs.getMetaData().getColumnCount(); } for (int columnIndex = 1; columnIndex <= columnCount; columnIndex++) { stmt.setObject(columnIndex, JdbcUtil.getColumnValue(rs, columnIndex)); } } }; /** * Creates a parameter setter for a PreparedQuery using the provided ColumnGetter. *

* This method returns a BiConsumer that sets parameters on a PreparedQuery based on the values * from a ResultSet using the specified ColumnGetter. *

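     * For example, a setter that copies every column as a plain object (the data sources,
     * tables and SQL are assumptions for illustration; {@code ColumnGetter.GET_OBJECT} is the
     * generic object getter):
     * <pre>
     * long copied = JdbcUtils.copy(srcDataSource,
     *         "SELECT first_name, last_name FROM account",
     *         targetDataSource,
     *         "INSERT INTO account_bak (first_name, last_name) VALUES (?, ?)",
     *         JdbcUtils.createParamSetter(ColumnGetter.GET_OBJECT));
     * </pre>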
* * @param columnGetterForAll the ColumnGetter to use for setting parameters * @return a BiConsumer that sets parameters on a PreparedQuery using the ResultSet */ @Beta @SequentialOnly @Stateful public static Throwables.BiConsumer createParamSetter(final ColumnGetter columnGetterForAll) { return new Throwables.BiConsumer<>() { private int columnCount = -1; @Override public void accept(final PreparedQuery stmt, final ResultSet rs) throws SQLException { if (columnCount < 0) { columnCount = JdbcUtil.getColumnCount(rs); } for (int i = 1; i <= columnCount; i++) { stmt.setObject(i, columnGetterForAll.apply(rs, i)); } } }; } /** * Copies data from a source data source to a target data source for the specified table. *

* This method copies all data from the table in the source data source to the table in the target data source. *

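     * For example (both data sources are assumed to contain an {@code account} table):
     * <pre>
     * long copied = JdbcUtils.copy(srcDataSource, targetDataSource, "account");
     * </pre>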
* * @param sourceDataSource the data source from which to copy data * @param targetDataSource the data source to which to copy data * @param tableName the name of the table to copy * @return the number of rows copied * @throws SQLException if a database access error occurs */ public static long copy(final javax.sql.DataSource sourceDataSource, final javax.sql.DataSource targetDataSource, final String tableName) throws SQLException { return copy(sourceDataSource, targetDataSource, tableName, tableName); } /** * Copies data from a source data source to a target data source for the specified table. *

* This method copies all data from the table in the source data source to the table in the target data source. *

* * @param sourceDataSource the data source from which to copy data * @param targetDataSource the data source to which to copy data * @param sourceTableName the name of the table in the source data source * @param targetTableName the name of the table in the target data source * @return the number of rows copied * @throws SQLException if a database access error occurs */ public static long copy(final javax.sql.DataSource sourceDataSource, final javax.sql.DataSource targetDataSource, final String sourceTableName, final String targetTableName) throws SQLException { return copy(sourceDataSource, targetDataSource, sourceTableName, targetTableName, JdbcUtil.DEFAULT_BATCH_SIZE); } /** * Copies data from a source data source to a target data source for the specified table. *

* This method copies all data from the table in the source data source to the table in the target data source. *

* * @param sourceDataSource the data source from which to copy data * @param targetDataSource the data source to which to copy data * @param sourceTableName the name of the table in the source data source * @param targetTableName the name of the table in the target data source * @param batchSize the number of rows to copy in each batch * @return the number of rows copied * @throws SQLException if a database access error occurs */ public static long copy(final javax.sql.DataSource sourceDataSource, final javax.sql.DataSource targetDataSource, final String sourceTableName, final String targetTableName, final int batchSize) throws SQLException { String selectSql = null; String insertSql = null; Connection conn = null; try { conn = sourceDataSource.getConnection(); selectSql = JdbcCodeGenerationUtil.generateSelectSql(conn, sourceTableName); insertSql = JdbcCodeGenerationUtil.generateInsertSql(conn, sourceTableName); if (!sourceTableName.equals(targetTableName)) { insertSql = Strings.replaceFirstIgnoreCase(insertSql, sourceTableName, targetTableName); } } finally { JdbcUtil.releaseConnection(conn, sourceDataSource); } return copy(sourceDataSource, selectSql, N.max(JdbcUtil.DEFAULT_FETCH_SIZE_FOR_BIG_RESULT, batchSize), targetDataSource, insertSql, batchSize); } /** * Copies data from a source data source to a target data source for the specified table and columns. *

* This method copies data from the specified columns of the table in the source data source to the table in the target data source. *

* * @param sourceDataSource the data source from which to copy data * @param targetDataSource the data source to which to copy data * @param sourceTableName the name of the table in the source data source * @param targetTableName the name of the table in the target data source * @param selectColumnNames the collection of column names to copy * @return the number of rows copied * @throws SQLException if a database access error occurs */ public static long copy(final javax.sql.DataSource sourceDataSource, final javax.sql.DataSource targetDataSource, final String sourceTableName, final String targetTableName, final Collection selectColumnNames) throws SQLException { return copy(sourceDataSource, targetDataSource, sourceTableName, targetTableName, selectColumnNames, JdbcUtil.DEFAULT_BATCH_SIZE); } /** * Copies data from a source data source to a target data source for the specified table and columns. *

* This method copies data from the specified columns of the table in the source data source to the table in the target data source. *

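     * For example, copying two columns in batches of 500 (the table and column names are
     * assumed for illustration):
     * <pre>
     * long copied = JdbcUtils.copy(srcDataSource, targetDataSource, "account", "account_bak",
     *         N.asList("first_name", "last_name"), 500);
     * </pre>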
* * @param sourceDataSource the data source from which to copy data * @param targetDataSource the data source to which to copy data * @param sourceTableName the name of the table in the source data source * @param targetTableName the name of the table in the target data source * @param selectColumnNames the collection of column names to copy * @param batchSize the number of rows to copy in each batch * @return the number of rows copied * @throws SQLException if a database access error occurs */ public static long copy(final javax.sql.DataSource sourceDataSource, final javax.sql.DataSource targetDataSource, final String sourceTableName, final String targetTableName, final Collection selectColumnNames, final int batchSize) throws SQLException { String selectSql = null; String insertSql = null; Connection conn = null; try { conn = sourceDataSource.getConnection(); selectSql = generateSelectSql(conn, sourceTableName, selectColumnNames); insertSql = generateInsertSql(conn, sourceTableName, selectColumnNames); if (!sourceTableName.equals(targetTableName)) { insertSql = Strings.replaceFirstIgnoreCase(insertSql, sourceTableName, targetTableName); } } finally { JdbcUtil.releaseConnection(conn, sourceDataSource); } return copy(sourceDataSource, selectSql, N.max(JdbcUtil.DEFAULT_FETCH_SIZE_FOR_BIG_RESULT, batchSize), targetDataSource, insertSql, batchSize); } /** * Copies data from a source data source to a target data source using the specified SQL queries. *

* This method copies data from the result of the select SQL query in the source data source to the target data source using the insert SQL query. *

* * @param sourceDataSource the data source from which to copy data * @param selectSql the SQL query to select data from the source data source * @param targetDataSource the data source to which to copy data * @param insertSql the SQL query to insert data into the target data source * @return the number of rows copied * @throws SQLException if a database access error occurs */ public static long copy(final javax.sql.DataSource sourceDataSource, final String selectSql, final javax.sql.DataSource targetDataSource, final String insertSql) throws SQLException { return copy(sourceDataSource, selectSql, JdbcUtil.DEFAULT_FETCH_SIZE_FOR_BIG_RESULT, targetDataSource, insertSql, JdbcUtil.DEFAULT_BATCH_SIZE); } /** * Copies data from a source data source to a target data source using the specified SQL queries and fetch size. *

* This method copies data from the result of the select SQL query in the source data source to the target data source using the insert SQL query. *

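     * For example, with a fetch size a few times the batch size (the sizes, tables and SQL are
     * illustrative only):
     * <pre>
     * long copied = JdbcUtils.copy(srcDataSource,
     *         "SELECT first_name, last_name FROM account", 2000,
     *         targetDataSource,
     *         "INSERT INTO account_bak (first_name, last_name) VALUES (?, ?)", 500);
     * </pre>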
     *
     * @param sourceDataSource the data source from which to copy data
     * @param selectSql the SQL query to select data from the source data source
     * @param fetchSize the fetch size; it should be bigger than {@code batchSize} and can be several times {@code batchSize}, depending on how big one record is and how much memory is available
     * @param targetDataSource the data source to which to copy data
     * @param insertSql the SQL query to insert data into the target data source
     * @param batchSize the number of rows to copy in each batch
     * @return the number of rows copied
     * @throws SQLException if a database access error occurs
     */
    public static long copy(final javax.sql.DataSource sourceDataSource, final String selectSql, final int fetchSize,
            final javax.sql.DataSource targetDataSource, final String insertSql, final int batchSize) throws SQLException {
        return copy(sourceDataSource, selectSql, fetchSize, targetDataSource, insertSql, batchSize, 0, supplierOfStmtSetterByRS.get());
    }

    /**
     * Copies data from a source data source to a target data source using the specified SQL queries and statement setter.
     *

* This method executes a select SQL query on the source data source and inserts the result into the target data source using the provided insert SQL query. * The statement setter is used to set parameters on the prepared statement. *

* * @param sourceDataSource the data source from which to copy data * @param selectSql the SQL query to select data from the source data source * @param targetDataSource the data source to which to copy data * @param insertSql the SQL query to insert data into the target data source * @param stmtSetter a bi-consumer to set parameters on the prepared statement * @return the number of rows copied * @throws SQLException if a database access error occurs */ public static long copy(final javax.sql.DataSource sourceDataSource, final String selectSql, final javax.sql.DataSource targetDataSource, final String insertSql, final Throwables.BiConsumer stmtSetter) throws SQLException { return copy(sourceDataSource, selectSql, JdbcUtil.DEFAULT_FETCH_SIZE_FOR_BIG_RESULT, targetDataSource, insertSql, JdbcUtil.DEFAULT_BATCH_SIZE, 0, stmtSetter); } /** * Copies data from a source data source to a target data source using the specified SQL queries and statement setter. *

* This method executes a select SQL query on the source data source and inserts the result into the target data source using the provided insert SQL query. * The statement setter is used to set parameters on the prepared statement. *

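     * For example, transforming a column while copying (the names and SQL are assumed;
     * {@code Strings.toUpperCase} is used here as a null-safe upper-casing helper):
     * <pre>
     * long copied = JdbcUtils.copy(srcDataSource,
     *         "SELECT first_name, last_name FROM account", 2000,
     *         targetDataSource,
     *         "INSERT INTO account_bak (first_name, last_name) VALUES (?, ?)", 500, 0,
     *         (q, rs) -> {
     *             q.setString(1, Strings.toUpperCase(rs.getString(1)));
     *             q.setString(2, rs.getString(2));
     *         });
     * </pre>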
     *
     * @param sourceDataSource the data source from which to copy data
     * @param selectSql the SQL query to select data from the source data source
     * @param fetchSize the fetch size; it should be bigger than {@code batchSize} and can be several times {@code batchSize}, depending on how big one record is and how much memory is available
     * @param targetDataSource the data source to which to copy data
     * @param insertSql the SQL query to insert data into the target data source
     * @param batchSize the number of rows to copy in each batch
     * @param batchIntervalInMillis the interval in milliseconds between each batch
     * @param stmtSetter a bi-consumer to set parameters on the prepared statement
     * @return the number of rows copied
     * @throws SQLException if a database access error occurs
     */
    public static long copy(final javax.sql.DataSource sourceDataSource, final String selectSql, final int fetchSize,
            final javax.sql.DataSource targetDataSource, final String insertSql, final int batchSize, final long batchIntervalInMillis,
            final Throwables.BiConsumer<? super PreparedQuery, ? super ResultSet, SQLException> stmtSetter) throws SQLException {
        Connection sourceConn = null;
        Connection targetConn = null;

        try {
            sourceConn = JdbcUtil.getConnection(sourceDataSource);
            targetConn = JdbcUtil.getConnection(targetDataSource);

            return copy(sourceConn, selectSql, fetchSize, targetConn, insertSql, batchSize, batchIntervalInMillis, stmtSetter);
        } finally {
            if (sourceConn != null) {
                JdbcUtil.releaseConnection(sourceConn, sourceDataSource);
            }

            if (targetConn != null) {
                JdbcUtil.releaseConnection(targetConn, targetDataSource);
            }
        }
    }

    /**
     * Copies data from a source data source to a target data source using the specified table name.
     *

* This method copies data from the source table to the target table within the same database or across different databases. *

* * @param sourceConn the connection to the source database * @param targetConn the connection to the target database * @param tableName the name of the table to copy data from and to * @return the number of rows copied * @throws SQLException if a database access error occurs */ public static long copy(final Connection sourceConn, final Connection targetConn, final String tableName) throws SQLException { return copy(sourceConn, targetConn, tableName, tableName); } /** * Copies data from a source table to a target table using the specified connections. *

* This method copies data from the source table to the target table within the same database or across different databases. *

* * @param sourceConn the connection to the source database * @param targetConn the connection to the target database * @param sourceTableName the name of the source table to copy data from * @param targetTableName the name of the target table to copy data to * @return the number of rows copied * @throws SQLException if a database access error occurs */ public static long copy(final Connection sourceConn, final Connection targetConn, final String sourceTableName, final String targetTableName) throws SQLException { return copy(sourceConn, targetConn, sourceTableName, targetTableName, JdbcUtil.DEFAULT_BATCH_SIZE); } /** * Copies data from a source table to a target table using the specified connections. *

* This method copies data from the source table to the target table within the same database or across different databases. *

* * @param sourceConn the connection to the source database * @param targetConn the connection to the target database * @param sourceTableName the name of the source table to copy data from * @param targetTableName the name of the target table to copy data to * @param batchSize the number of rows to copy in each batch * @return the number of rows copied * @throws SQLException if a database access error occurs */ public static long copy(final Connection sourceConn, final Connection targetConn, final String sourceTableName, final String targetTableName, final int batchSize) throws SQLException { final String selectSql = JdbcCodeGenerationUtil.generateSelectSql(sourceConn, sourceTableName); String insertSql = JdbcCodeGenerationUtil.generateInsertSql(sourceConn, sourceTableName); if (!sourceTableName.equals(targetTableName)) { insertSql = Strings.replaceFirstIgnoreCase(insertSql, sourceTableName, targetTableName); } return copy(sourceConn, selectSql, N.max(JdbcUtil.DEFAULT_FETCH_SIZE_FOR_BIG_RESULT, batchSize), targetConn, insertSql, batchSize); } /** * Copies data from a source table to a target table using the specified connections and column names. *

* This method copies data from the source table to the target table within the same database or across different databases. *

* * @param sourceConn the connection to the source database * @param targetConn the connection to the target database * @param sourceTableName the name of the source table to copy data from * @param targetTableName the name of the target table to copy data to * @param selectColumnNames the collection of column names to be copied * @return the number of rows copied * @throws SQLException if a database access error occurs */ public static long copy(final Connection sourceConn, final Connection targetConn, final String sourceTableName, final String targetTableName, final Collection selectColumnNames) throws SQLException { return copy(sourceConn, targetConn, sourceTableName, targetTableName, selectColumnNames, JdbcUtil.DEFAULT_BATCH_SIZE); } /** * Copies data from a source table to a target table using the specified connections, column names, and batch size. *

* This method copies data from the source table to the target table within the same database or across different databases. *

* * @param sourceConn the connection to the source database * @param targetConn the connection to the target database * @param sourceTableName the name of the source table to copy data from * @param targetTableName the name of the target table to copy data to * @param selectColumnNames the collection of column names to be copied * @param batchSize the number of rows to be copied in each batch * @return the number of rows copied * @throws SQLException if a database access error occurs */ public static long copy(final Connection sourceConn, final Connection targetConn, final String sourceTableName, final String targetTableName, final Collection selectColumnNames, final int batchSize) throws SQLException { final String selectSql = generateSelectSql(sourceConn, sourceTableName, selectColumnNames); String insertSql = generateInsertSql(sourceConn, sourceTableName, selectColumnNames); if (!sourceTableName.equals(targetTableName)) { insertSql = Strings.replaceFirstIgnoreCase(insertSql, sourceTableName, targetTableName); } return copy(sourceConn, selectSql, N.max(JdbcUtil.DEFAULT_FETCH_SIZE_FOR_BIG_RESULT, batchSize), targetConn, insertSql, batchSize); } private static String generateSelectSql(final Connection conn, final String tableName, final Collection selectColumnNames) { if (N.isEmpty(selectColumnNames)) { return JdbcCodeGenerationUtil.generateSelectSql(conn, tableName); } final StringBuilder sb = new StringBuilder(); sb.append(WD.SELECT).append(WD._SPACE); final Iterator iter = selectColumnNames.iterator(); final int lastIdx = selectColumnNames.size() - 1; int cnt = 0; while (iter.hasNext() && cnt++ < lastIdx) { sb.append(iter.next()).append(WD.COMMA_SPACE); } sb.append(iter.next()).append(WD._SPACE).append(WD.FROM).append(WD._SPACE).append(tableName); return sb.toString(); } private static String generateInsertSql(final Connection conn, final String tableName, final Collection selectColumnNames) { if (N.isEmpty(selectColumnNames)) { return JdbcCodeGenerationUtil.generateInsertSql(conn, tableName); } final StringBuilder sb = new StringBuilder(); sb.append(WD.INSERT).append(WD._SPACE).append(WD.INTO).append(WD._SPACE).append(tableName).append(WD._PARENTHESES_L); final Iterator iter = selectColumnNames.iterator(); final int lastIdx = selectColumnNames.size() - 1; int cnt = 0; while (iter.hasNext() && cnt++ < lastIdx) { sb.append(iter.next()).append(WD.COMMA_SPACE); } sb.append(iter.next()) .append(WD._PARENTHESES_R) .append(WD._SPACE) .append(WD.VALUES) .append(WD._SPACE) .append(Strings.repeat("?", selectColumnNames.size(), ", ", "(", ")")); return sb.toString(); } /** * Copies data from a source database to a target database using the specified SQL queries. *

* This method executes the provided select SQL on the source connection and inserts the results into the target connection using the provided insert SQL. *

* * @param sourceConn the connection to the source database * @param selectSql the SQL query to select data from the source database * @param targetConn the connection to the target database * @param insertSql the SQL query to insert data into the target database * @return the number of rows copied * @throws SQLException if a database access error occurs */ public static long copy(final Connection sourceConn, final String selectSql, final Connection targetConn, final String insertSql) throws SQLException { return copy(sourceConn, selectSql, JdbcUtil.DEFAULT_FETCH_SIZE_FOR_BIG_RESULT, targetConn, insertSql, JdbcUtil.DEFAULT_BATCH_SIZE); } /** * Copies data from a source database to a target database using the specified SQL queries. *

* This method executes the provided select SQL on the source connection and inserts the results into the target connection using the provided insert SQL. *

* * @param sourceConn the connection to the source database * @param selectSql the SQL query to select data from the source database * @param fetchSize the number of rows to fetch at a time from the source database; should be larger than batchSize * @param targetConn the connection to the target database * @param insertSql the SQL query to insert data into the target database * @param batchSize the number of rows to be copied in each batch * @return the number of rows copied * @throws SQLException if a database access error occurs */ public static long copy(final Connection sourceConn, final String selectSql, final int fetchSize, final Connection targetConn, final String insertSql, final int batchSize) throws SQLException { return copy(sourceConn, selectSql, fetchSize, targetConn, insertSql, batchSize, 0, supplierOfStmtSetterByRS.get()); } /** * Copies data from a source database to a target database using the specified SQL queries and a custom statement setter. *

* This method executes the provided select SQL on the source connection and inserts the results into the target connection using the provided insert SQL. * The custom statement setter is used to set the parameters of the prepared statement. *

* * @param sourceConn the connection to the source database * @param selectSql the SQL query to select data from the source database * @param targetConn the connection to the target database * @param insertSql the SQL query to insert data into the target database * @param stmtSetter the custom statement setter to set the parameters of the prepared statement * @return the number of rows copied * @throws SQLException if a database access error occurs */ public static long copy(final Connection sourceConn, final String selectSql, final Connection targetConn, final String insertSql, final Throwables.BiConsumer stmtSetter) throws SQLException { return copy(sourceConn, selectSql, JdbcUtil.DEFAULT_FETCH_SIZE_FOR_BIG_RESULT, targetConn, insertSql, JdbcUtil.DEFAULT_BATCH_SIZE, 0, stmtSetter); } /** * Copies data from a source database to a target database using the specified SQL queries. *

* This method executes the provided select SQL on the source connection and inserts the results into the target connection using the provided insert SQL. * The custom statement setter is used to set the parameters of the prepared statement. *

     *
     * @param sourceConn the connection to the source database
     * @param selectSql the SQL query to select data from the source database
     * @param fetchSize the number of rows to fetch at a time from the source database; should be larger than batchSize
     * @param targetConn the connection to the target database
     * @param insertSql the SQL query to insert data into the target database
     * @param batchSize the number of rows to be copied in each batch
     * @param batchIntervalInMillis the interval in milliseconds between each batch
     * @param stmtSetter the custom statement setter to set the parameters of the prepared statement
     * @return the number of rows copied
     * @throws SQLException if a database access error occurs
     */
    public static long copy(final Connection sourceConn, final String selectSql, final int fetchSize, final Connection targetConn, final String insertSql,
            final int batchSize, final long batchIntervalInMillis,
            final Throwables.BiConsumer<? super PreparedQuery, ? super ResultSet, SQLException> stmtSetter) throws SQLException {
        PreparedStatement selectStmt = null;
        PreparedStatement insertStmt = null;

        try {
            selectStmt = JdbcUtil.prepareStatement(sourceConn, selectSql, ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
            setFetchForBigResult(sourceConn, selectStmt, fetchSize);

            insertStmt = JdbcUtil.prepareStatement(targetConn, insertSql);

            return copy(selectStmt, insertStmt, batchSize, batchIntervalInMillis, stmtSetter);
        } finally {
            JdbcUtil.closeQuietly(selectStmt);
            JdbcUtil.closeQuietly(insertStmt);
        }
    }

    /**
     * Copies the data selected by {@code selectStmt} into the target inserted by {@code insertStmt}.
     *
     * @param selectStmt the PreparedStatement used to select data from the source.
     * @param insertStmt the PreparedStatement used to insert data into the target.
     * @param batchSize the number of rows to process in each batch.
     * @param batchIntervalInMillis the interval in milliseconds between each batch.
     * @param stmtSetter a BiConsumer that sets the parameters on the insert statement from the current {@code ResultSet} row.
     * @return the number of rows copied.
     * @throws SQLException if a database access error occurs.
*/ public static long copy(final PreparedStatement selectStmt, final PreparedStatement insertStmt, final int batchSize, final long batchIntervalInMillis, final Throwables.BiConsumer stmtSetter) throws SQLException { // N.checkArgument(offset >= 0 && count >= 0, "'offset'=%s and 'count'=%s can't be negative"); N.checkArgument(batchSize > 0 && batchIntervalInMillis >= 0, "'batchSize'=%s must be greater than 0 and 'batchIntervalInMillis'=%s can't be negative", batchSize, batchIntervalInMillis); final Throwables.BiConsumer stmtSetterForInsert = N.defaultIfNull(stmtSetter, supplierOfStmtSetterByRS); final PreparedQuery preparedQueryForInsert = new PreparedQuery(insertStmt); ResultSet rs = null; try { rs = JdbcUtil.executeQuery(selectStmt); long cnt = 0; while (rs.next()) { cnt++; stmtSetterForInsert.accept(preparedQueryForInsert, rs); insertStmt.addBatch(); if (cnt % batchSize == 0) { JdbcUtil.executeBatch(insertStmt); if (batchIntervalInMillis > 0) { N.sleep(batchIntervalInMillis); } } } if (cnt % batchSize > 0) { JdbcUtil.executeBatch(insertStmt); } // insertStmt.clearBatch(); // clearBatch() is called in JdbcUtil.executeBatch(insertStmt) return cnt; } finally { JdbcUtil.closeQuietly(rs); } } // private static final Supplier supplierOfRowExtractor = new Supplier<>() { // @Override // public RowExtractor get() { // return new Jdbc.RowExtractor() { // private int columnCount = 0; // // @Override // public void accept(final ResultSet rs, final Object[] outputRow) throws SQLException { // if (columnCount == 0) { // columnCount = rs.getMetaData().getColumnCount(); // } // // for (int i = 0; i < columnCount; i++) { // outputRow[i] = rs.getObject(i + 1); // } // } // }; // } // }; // /** // * Starts another thread to read the records from {@code ResultSet} meanwhile run batch insert in current thread in parallel. // * // * @param sourceDataSource // * @param selectSql // * @param targetDataSource // * @param insertSql // * @param stmtSetter // * @return // * @throws SQLException // */ // public static long copyInParallel(final javax.sql.DataSource sourceDataSource, final String selectSql, final javax.sql.DataSource targetDataSource, // final String insertSql, final Throwables.BiConsumer stmtSetter) throws SQLException { // return copyInParallel(sourceDataSource, selectSql, JdbcUtil.DEFAULT_FETCH_SIZE_FOR_BIG_RESULT, targetDataSource, insertSql, JdbcUtil.DEFAULT_BATCH_SIZE, // stmtSetter); // } // // /** // * Starts another thread to read the records from {@code ResultSet} meanwhile run batch insert in current thread in parallel. // * // * @param sourceDataSource // * @param selectSql // * @param fetchSize it should be bigger than {@code batchSize}. It can be x times {@code batchSize}, depends on how big one record is and how much memory is available. 
    //  * @param targetDataSource
    //  * @param insertSql
    //  * @param batchSize
    //  * @param stmtSetter
    //  * @return
    //  * @throws SQLException
    //  */
    // public static long copyInParallel(final javax.sql.DataSource sourceDataSource, final String selectSql, final int fetchSize,
    //         final javax.sql.DataSource targetDataSource, final String insertSql, final int batchSize,
    //         final Throwables.BiConsumer<? super PreparedQuery, ? super Object[], SQLException> stmtSetter) throws SQLException {
    //     Connection sourceConn = null;
    //     Connection targetConn = null;
    //
    //     try {
    //         sourceConn = JdbcUtil.getConnection(sourceDataSource);
    //         targetConn = JdbcUtil.getConnection(targetDataSource);
    //
    //         return copyInParallel(sourceConn, selectSql, fetchSize, targetConn, insertSql, batchSize, stmtSetter);
    //     } finally {
    //         if (sourceConn != null) {
    //             JdbcUtil.releaseConnection(sourceConn, sourceDataSource);
    //         }
    //
    //         if (targetConn != null) {
    //             JdbcUtil.releaseConnection(targetConn, targetDataSource);
    //         }
    //     }
    // }
    //
    // /**
    //  * Starts another thread to read records from the {@code ResultSet} while the batch insert runs in the current thread, in parallel.
    //  *
    //  * @param sourceConn
    //  * @param selectSql
    //  * @param targetConn
    //  * @param insertSql
    //  * @param stmtSetter
    //  * @return
    //  * @throws SQLException
    //  */
    // public static long copyInParallel(final Connection sourceConn, final String selectSql, final Connection targetConn, final String insertSql,
    //         final Throwables.BiConsumer<? super PreparedQuery, ? super Object[], SQLException> stmtSetter) throws SQLException {
    //     return copyInParallel(sourceConn, selectSql, JdbcUtil.DEFAULT_FETCH_SIZE_FOR_BIG_RESULT, targetConn, insertSql, JdbcUtil.DEFAULT_BATCH_SIZE,
    //             stmtSetter);
    // }
    //
    // /**
    //  * Starts another thread to read records from the {@code ResultSet} while the batch insert runs in the current thread, in parallel.
    //  *
    //  * @param sourceConn
    //  * @param selectSql
    //  * @param fetchSize it should be bigger than {@code batchSize}. It can be x times {@code batchSize}, depending on how big one record is and how much memory is available.
    //  * @param targetConn
    //  * @param insertSql
    //  * @param batchSize
    //  * @param stmtSetter
    //  * @return
    //  * @throws SQLException
    //  */
    // public static long copyInParallel(final Connection sourceConn, final String selectSql, final int fetchSize, final Connection targetConn,
    //         final String insertSql, final int batchSize, final Throwables.BiConsumer<? super PreparedQuery, ? super Object[], SQLException> stmtSetter)
    //         throws SQLException {
    //     return copyInParallel(sourceConn, selectSql, fetchSize, targetConn, insertSql, batchSize, 0, stmtSetter);
    // }
    //
    // /**
    //  * Starts another thread to read records from the {@code ResultSet} while the batch insert runs in the current thread, in parallel.
    //  *
    //  * @param sourceConn
    //  * @param selectSql
    //  * @param fetchSize it should be bigger than {@code batchSize}. It can be x times {@code batchSize}, depending on how big one record is and how much memory is available.
    //  * @param targetConn
    //  * @param insertSql
    //  * @param batchSize
    //  * @param batchIntervalInMillis
    //  * @param stmtSetter
    //  * @return
    //  * @throws SQLException
    //  */
    // public static long copyInParallel(final Connection sourceConn, final String selectSql, final int fetchSize,
    //         final Connection targetConn, final String insertSql, final int batchSize, final long batchIntervalInMillis,
    //         final Throwables.BiConsumer<? super PreparedQuery, ? super Object[], SQLException> stmtSetter) throws SQLException {
    //     PreparedStatement selectStmt = null;
    //     PreparedStatement insertStmt = null;
    //
    //     try {
    //         selectStmt = JdbcUtil.prepareStatement(sourceConn, selectSql, ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
    //         selectStmt.setFetchSize(fetchSize);
    //
    //         insertStmt = JdbcUtil.prepareStatement(targetConn, insertSql);
    //
    //         return copyInParallel(selectStmt, insertStmt, batchSize, batchIntervalInMillis, Jdbc.BiRowMapper.TO_ARRAY, stmtSetter);
    //     } finally {
    //         JdbcUtil.closeQuietly(selectStmt);
    //         JdbcUtil.closeQuietly(insertStmt);
    //     }
    // }
    //
    // /**
    //  *
    //  * @param selectStmt
    //  * @param insertStmt
    //  * @param stmtSetter
    //  * @return
    //  * @throws SQLException
    //  */
    // public static long copyInParallel(final PreparedStatement selectStmt, final PreparedStatement insertStmt,
    //         final Throwables.BiConsumer<? super PreparedQuery, ? super Object[], SQLException> stmtSetter) throws SQLException {
    //     return copyInParallel(selectStmt, insertStmt, Jdbc.BiRowMapper.TO_ARRAY, stmtSetter);
    // }
    //
    // /**
    //  *
    //  * @param selectStmt
    //  * @param insertStmt
    //  * @param rowMapper
    //  * @param stmtSetter
    //  * @return
    //  * @throws SQLException
    //  */
    // public static long copyInParallel(final PreparedStatement selectStmt, final PreparedStatement insertStmt, final Jdbc.BiRowMapper<Object[]> rowMapper,
    //         final Throwables.BiConsumer<? super PreparedQuery, ? super Object[], SQLException> stmtSetter) throws SQLException {
    //     return copyInParallel(selectStmt, insertStmt, JdbcUtil.DEFAULT_BATCH_SIZE, 0, rowMapper, stmtSetter);
    // }
    //
    // /**
    //  *
    //  * @param selectStmt
    //  * @param insertStmt
    //  * @param batchSize
    //  * @param batchIntervalInMillis
    //  * @param rowMapper
    //  * @param stmtSetter
    //  * @return
    //  * @throws SQLException
    //  */
    // public static long copyInParallel(final PreparedStatement selectStmt, final PreparedStatement insertStmt,
    //         final int batchSize, final long batchIntervalInMillis, final Jdbc.BiRowMapper<Object[]> rowMapper,
    //         final Throwables.BiConsumer<? super PreparedQuery, ? super Object[], SQLException> stmtSetter) throws SQLException {
    //     N.checkArgument(batchSize > 0 && batchIntervalInMillis >= 0,
    //             "'batchSize'=%s must be greater than 0 and 'batchIntervalInMillis'=%s can't be negative", batchSize, batchIntervalInMillis);
    //
    //     final PreparedQuery preparedQueryForInsert = new PreparedQuery(insertStmt);
    //
    //     ResultSet rs = null;
    //
    //     try {
    //         rs = JdbcUtil.executeQuery(selectStmt);
    //
    //         long cnt = 0;
    //
    //         while (rs.next()) {
    //             cnt++;
    //
    //             stmtSetter.accept(preparedQueryForInsert, rowMapper.apply(rs)); // TODO
    //             insertStmt.addBatch();
    //
    //             if (cnt % batchSize == 0) {
    //                 JdbcUtil.executeBatch(insertStmt);
    //
    //                 if (batchIntervalInMillis > 0) {
    //                     N.sleep(batchIntervalInMillis);
    //                 }
    //             }
    //         }
    //
    //         if (cnt % batchSize > 0) {
    //             JdbcUtil.executeBatch(insertStmt);
    //         }
    //
    //         // insertStmt.clearBatch(); // clearBatch() is called in JdbcUtil.executeBatch(insertStmt)
    //
    //         return cnt;
    //     } finally {
    //         JdbcUtil.closeQuietly(rs);
    //     }
    // }

    // /**
    //  *
    //  * @param <E>
    //  * @param conn
    //  * @param sql
    //  * @param rowParser
    //  * @throws SQLException
    //  * @throws E
    //  */
    // public static <E extends Exception> void parse(final Connection conn, final String sql,
    //         final Throwables.Consumer<? super Object[], E> rowParser) throws SQLException, E {
    //     parse(conn, sql, rowParser, Fn.emptyAction());
    // }
    //
    // /**
    //  *
    //  * @param <E>
    //  * @param <E2>
    //  * @param conn
    //  * @param sql
    //  * @param rowParser
    //  * @param onComplete
    //  * @throws SQLException
    //  * @throws E
    //  * @throws E2
    //  */
    // public static <E extends Exception, E2 extends Exception> void parse(final Connection conn, final String sql,
    //         final Throwables.Consumer<? super Object[], E> rowParser, final Throwables.Runnable<E2> onComplete) throws SQLException, E, E2 {
    //     parse(conn, sql, 0, Long.MAX_VALUE, rowParser, onComplete);
    // }
    //
    // /**
    //  *
    //  * @param <E>
    //  * @param conn
    //  * @param sql
    //  * @param offset
    //  * @param count
    //  * @param rowParser
    //  * @throws SQLException
    //  * @throws E
    //  */
    // public static <E extends Exception> void parse(final Connection conn, final String sql, final long offset, final long count,
    //         final Throwables.Consumer<? super Object[], E> rowParser) throws SQLException, E {
    //     parse(conn, sql, offset, count, rowParser, Fn.emptyAction());
    // }
    //
    // /**
    //  *
    //  * @param <E>
    //  * @param <E2>
    //  * @param conn
    //  * @param sql
    //  * @param offset
    //  * @param count
    //  * @param rowParser
    //  * @param onComplete
    //  * @throws SQLException
    //  * @throws E
    //  * @throws E2
    //  */
    // public static <E extends Exception, E2 extends Exception> void parse(final Connection conn, final String sql, final long offset, final long count,
    //         final Throwables.Consumer<? super Object[], E> rowParser, final Throwables.Runnable<E2> onComplete) throws SQLException, E, E2 {
    //     parse(conn, sql, offset, count, 0, 0, rowParser, onComplete);
    // }
    //
    // /**
    //  *
    //  * @param <E>
    //  * @param conn
    //  * @param sql
    //  * @param offset
    //  * @param count
    //  * @param processThreadNum
    //  * @param queueSize
    //  * @param rowParser
    //  * @throws SQLException
    //  * @throws E
    //  */
    // public static <E extends Exception> void parse(final Connection conn, final String sql, final long offset, final long count, final int processThreadNum,
    //         final int queueSize, final Throwables.Consumer<? super Object[], E> rowParser) throws SQLException, E {
    //     parse(conn, sql, offset, count, processThreadNum, queueSize, rowParser, Fn.emptyAction());
    // }
    //
    // /**
    //  * Parses the ResultSet obtained by executing the query with the specified Connection and sql.
    //  *
    //  * @param <E>
    //  * @param <E2>
    //  * @param conn
    //  * @param sql
    //  * @param offset
    //  * @param count
    //  * @param processThreadNum new threads started to parse/process the lines/records
    //  * @param queueSize size of the queue holding the records loaded from the source data, pending processing. The default size is 1024.
    //  * @param rowParser
    //  * @param onComplete
    //  * @throws SQLException
    //  * @throws E
    //  * @throws E2
    //  */
    // public static <E extends Exception, E2 extends Exception> void parse(final Connection conn, final String sql, final long offset, final long count,
    //         final int processThreadNum, final int queueSize, final Throwables.Consumer<? super Object[], E> rowParser,
    //         final Throwables.Runnable<E2> onComplete) throws SQLException, E, E2 {
    //     try (PreparedStatement stmt = JdbcUtil.prepareStatement(conn, sql)) {
    //
    //         setFetchForBigResult(conn, stmt);
    //
    //         parse(stmt, offset, count, processThreadNum, queueSize, rowParser, onComplete);
    //     }
    // }
    //
    // /**
    //  *
    //  * @param <E>
    //  * @param stmt
    //  * @param rowParser
    //  * @throws SQLException
    //  * @throws E
    //  */
    // public static <E extends Exception> void parse(final PreparedStatement stmt, final Throwables.Consumer<? super Object[], E> rowParser)
    //         throws SQLException, E {
    //     parse(stmt, rowParser, Fn.emptyAction());
    // }
    //
    // /**
    //  *
    //  * @param <E>
    //  * @param <E2>
    //  * @param stmt
    //  * @param rowParser
    //  * @param onComplete
    //  * @throws SQLException
    //  * @throws E
    //  * @throws E2
    //  */
    // public static <E extends Exception, E2 extends Exception> void parse(final PreparedStatement stmt,
    //         final Throwables.Consumer<? super Object[], E> rowParser, final Throwables.Runnable<E2> onComplete) throws SQLException, E, E2 {
    //     parse(stmt, 0, Long.MAX_VALUE, rowParser, onComplete);
    // }
    //
    // /**
    //  *
    //  * @param <E>
    //  * @param stmt
    //  * @param offset
    //  * @param count
    //  * @param rowParser
    //  * @throws SQLException
    //  * @throws E
    //  */
    // public static <E extends Exception> void parse(final PreparedStatement stmt, final long offset, final long count,
    //         final Throwables.Consumer<? super Object[], E> rowParser) throws SQLException, E {
    //     parse(stmt, offset, count, rowParser, Fn.emptyAction());
    // }
    //
    // /**
    //  *
    //  * @param <E>
    //  * @param <E2>
    //  * @param stmt
    //  * @param offset
    //  * @param count
    //  * @param rowParser
    //  * @param onComplete
    //  * @throws SQLException
    //  * @throws E
    //  * @throws E2
    //  */
    // public static <E extends Exception, E2 extends Exception> void parse(final PreparedStatement stmt, final long offset, final long count,
    //         final Throwables.Consumer<? super Object[], E> rowParser, final Throwables.Runnable<E2> onComplete) throws SQLException, E, E2 {
    //     parse(stmt, offset, count, 0, 0, rowParser, onComplete);
    // }
    //
    // /**
    //  *
    //  * @param <E>
    //  * @param stmt
    //  * @param offset
    //  * @param count
    //  * @param processThreadNum
    //  * @param queueSize
    //  * @param rowParser
    //  * @throws SQLException
    //  * @throws E
    //  */
    // public static <E extends Exception> void parse(final PreparedStatement stmt, final long offset, final long count, final int processThreadNum,
    //         final int queueSize, final Throwables.Consumer<? super Object[], E> rowParser) throws SQLException, E {
    //     parse(stmt, offset, count, processThreadNum, queueSize, rowParser, Fn.emptyAction());
    // }
    //
    // /**
    //  * Parses the ResultSet obtained by executing the query with the specified PreparedStatement.
    //  *
    //  * @param <E>
    //  * @param <E2>
    //  * @param stmt
    //  * @param offset
    //  * @param count
    //  * @param processThreadNum new threads started to parse/process the lines/records
    //  * @param queueSize size of the queue holding the records loaded from the source data, pending processing. The default size is 1024.
    //  * @param rowParser
    //  * @param onComplete
    //  * @throws SQLException
    //  * @throws E
    //  * @throws E2
    //  */
    // public static <E extends Exception, E2 extends Exception> void parse(final PreparedStatement stmt, final long offset, final long count,
    //         final int processThreadNum, final int queueSize, final Throwables.Consumer<? super Object[], E> rowParser,
    //         final Throwables.Runnable<E2> onComplete) throws SQLException, E, E2 {
    //     ResultSet rs = null;
    //
    //     try {
    //         rs = JdbcUtil.executeQuery(stmt);
    //
    //         parse(rs, offset, count, processThreadNum, queueSize, rowParser, onComplete);
    //     } finally {
    //         JdbcUtil.closeQuietly(rs);
    //     }
    // }
    //
    // /**
    //  *
    //  * @param <E>
    //  * @param rs
    //  * @param rowParser
    //  * @throws SQLException
    //  * @throws E
    //  */
    // public static <E extends Exception> void parse(final ResultSet rs, final Throwables.Consumer<? super Object[], E> rowParser) throws SQLException, E {
    //     parse(rs, rowParser, Fn.emptyAction());
    // }
    //
    // /**
    //  *
    //  * @param <E>
    //  * @param <E2>
    //  * @param rs
    //  * @param rowParser
    //  * @param onComplete
    //  * @throws SQLException
    //  * @throws E
    //  * @throws E2
    //  */
    // public static <E extends Exception, E2 extends Exception> void parse(final ResultSet rs, final Throwables.Consumer<? super Object[], E> rowParser,
    //         final Throwables.Runnable<E2> onComplete) throws SQLException, E, E2 {
    //     parse(rs, 0, Long.MAX_VALUE, rowParser, onComplete);
    // }
    //
    // /**
    //  *
    //  * @param <E>
    //  * @param rs
    //  * @param offset
    //  * @param count
    //  * @param rowParser
    //  * @throws SQLException
    //  * @throws E
    //  */
    // public static <E extends Exception> void parse(final ResultSet rs, final long offset, final long count,
    //         final Throwables.Consumer<? super Object[], E> rowParser) throws SQLException, E {
    //     parse(rs, offset, count, rowParser, Fn.emptyAction());
    // }
    //
    // /**
    //  *
    //  * @param <E>
    //  * @param <E2>
    //  * @param rs
    //  * @param offset
    //  * @param count
    //  * @param rowParser
    //  * @param onComplete
    //  * @throws SQLException
    //  * @throws E
    //  * @throws E2
    //  */
    // public static <E extends Exception, E2 extends Exception> void parse(final ResultSet rs, final long offset, final long count,
    //         final Throwables.Consumer<? super Object[], E> rowParser, final Throwables.Runnable<E2> onComplete) throws SQLException, E, E2 {
    //     parse(rs, offset, count, 0, 0, rowParser, onComplete);
    // }
    //
    // /**
    //  *
    //  * @param <E>
    //  * @param rs
    //  * @param offset
    //  * @param count
    //  * @param processThreadNum
    //  * @param queueSize
    //  * @param rowParser
    //  * @throws SQLException
    //  * @throws E
    //  */
    // public static <E extends Exception> void parse(final ResultSet rs, final long offset, final long count, final int processThreadNum, final int queueSize,
    //         final Throwables.Consumer<? super Object[], E> rowParser) throws SQLException, E {
    //     parse(rs, offset, count, processThreadNum, queueSize, rowParser, Fn.emptyAction());
    // }
    //
    // /**
    //  * Parses the ResultSet.
    //  *
    //  * @param <E>
    //  * @param <E2>
    //  * @param rs
    //  * @param offset
    //  * @param count
    //  * @param processThreadNum new threads started to parse/process the lines/records
    //  * @param queueSize size of the queue holding the records loaded from the source data, pending processing. The default size is 1024.
    //  * @param rowParser
    //  * @param onComplete
    //  * @throws E
    //  * @throws E2
    //  */
    // public static <E extends Exception, E2 extends Exception> void parse(final ResultSet rs, final long offset, final long count,
    //         final int processThreadNum, final int queueSize, final Throwables.Consumer<? super Object[], E> rowParser,
    //         final Throwables.Runnable<E2> onComplete) throws E, E2 {
    //     N.checkArgument(offset >= 0 && count >= 0, "'offset'=%s and 'count'=%s can't be negative", offset, count);
    //
    //     // Iterators.forEach(iter, processThreadNum, queueSize, elementParser);
    // }
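    /**
     * Configures the given statement for reading a large result: forces forward-only fetching and applies
     * a driver-appropriate fetch size. MySQL is special-cased because its Connector/J driver only streams
     * rows one at a time when the fetch size is set to {@code Integer.MIN_VALUE}; with a regular positive
     * fetch size it buffers the entire result set in memory.
     */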
    private static void setFetchForBigResult(final Connection conn, final PreparedStatement stmt) throws SQLException {
        setFetchForBigResult(conn, stmt, JdbcUtil.DEFAULT_FETCH_SIZE_FOR_BIG_RESULT);
    }

    private static void setFetchForBigResult(final Connection conn, final PreparedStatement stmt, final int fetchSize) throws SQLException {
        stmt.setFetchDirection(ResultSet.FETCH_FORWARD);

        if (JdbcUtil.getDBProductInfo(conn).getVersion().isMySQL()) {
            stmt.setFetchSize(Integer.MIN_VALUE);
        } else {
            stmt.setFetchSize(fetchSize);
        }
    }
}