/*
 *  ******************************************************************************
 *  *
 *  *
 *  * This program and the accompanying materials are made available under the
 *  * terms of the Apache License, Version 2.0 which is available at
 *  * https://www.apache.org/licenses/LICENSE-2.0.
 *  *
 *  *  See the NOTICE file distributed with this work for additional
 *  *  information regarding copyright ownership.
 *  * Unless required by applicable law or agreed to in writing, software
 *  * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 *  * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 *  * License for the specific language governing permissions and limitations
 *  * under the License.
 *  *
 *  * SPDX-License-Identifier: Apache-2.0
 *  *****************************************************************************
 */

package org.deeplearning4j.spark.data;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.broadcast.Broadcast;
import org.datavec.spark.util.DefaultHadoopConfig;
import org.datavec.spark.util.SerializableHadoopConfig;
import org.deeplearning4j.core.util.UIDProvider;
import org.nd4j.linalg.dataset.api.MultiDataSet;
import org.nd4j.common.primitives.Pair;

import java.net.URI;
import java.util.*;

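/**
 * A function intended for use with {@code JavaRDD<MultiDataSet>.mapPartitionsWithIndex(...)}.
 * For each partition it (a) combines and/or splits the incoming MultiDataSets so that each
 * batch has exactly the specified minibatch size (the final batch of a partition may be
 * smaller), and (b) saves each batch to the export base directory (local or HDFS, via the
 * Hadoop FileSystem API), returning the paths of the files that were written.
 */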
public class BatchAndExportMultiDataSetsFunction
                implements Function2<Integer, Iterator<MultiDataSet>, Iterator<String>> {

    private final int minibatchSize;
    private final String exportBaseDirectory;
    private final String jvmuid;
    private final Broadcast<SerializableHadoopConfig> conf;

    /**
     * @param minibatchSize       Minibatch size to combine examples into (if necessary)
     * @param exportBaseDirectory Base directory for exporting
     */
    public BatchAndExportMultiDataSetsFunction(int minibatchSize, String exportBaseDirectory) {
        this(minibatchSize, exportBaseDirectory, null);
    }

    /**
     * @param minibatchSize       Minibatch size to combine examples into (if necessary)
     * @param exportBaseDirectory Base directory for exporting
     * @param configuration       Hadoop Configuration
     */
    public BatchAndExportMultiDataSetsFunction(int minibatchSize, String exportBaseDirectory,
                    Broadcast<SerializableHadoopConfig> configuration) {
        this.minibatchSize = minibatchSize;
        this.exportBaseDirectory = exportBaseDirectory;
        String fullUID = UIDProvider.getJVMUID();
        this.jvmuid = (fullUID.length() <= 8 ? fullUID : fullUID.substring(0, 8));
        this.conf = configuration;
    }

    @Override
    public Iterator<String> call(Integer partitionIdx, Iterator<MultiDataSet> iterator) throws Exception {

        List<String> outputPaths = new ArrayList<>();
        LinkedList<MultiDataSet> tempList = new LinkedList<>();

        int count = 0;
        while (iterator.hasNext()) {
            MultiDataSet next = iterator.next();
            if (next.getFeatures(0).size(0) == minibatchSize) {
                outputPaths.add(export(next, partitionIdx, count++));
                continue;
            }
            //MultiDataSet must be either smaller or larger than the minibatch size...
            tempList.add(next);
            Pair<Integer, List<String>> countAndPaths = processList(tempList, partitionIdx, count, false);
            if (countAndPaths.getSecond() != null && !countAndPaths.getSecond().isEmpty()) {
                outputPaths.addAll(countAndPaths.getSecond());
            }
            count = countAndPaths.getFirst();
        }

        //We might have some left-over examples...
        Pair<Integer, List<String>> countAndPaths = processList(tempList, partitionIdx, count, true);
        if (countAndPaths.getSecond() != null && !countAndPaths.getSecond().isEmpty()) {
            outputPaths.addAll(countAndPaths.getSecond());
        }

        return outputPaths.iterator();
    }

    private Pair<Integer, List<String>> processList(LinkedList<MultiDataSet> tempList, int partitionIdx,
                    int countBefore, boolean finalExport) throws Exception {
        //Go through the list. If we have enough examples: remove the MultiDataSet objects, merge and export them. Otherwise: do nothing
        int numExamples = 0;
        for (MultiDataSet ds : tempList) {
            numExamples += ds.getFeatures(0).size(0);
        }

        if (tempList.isEmpty() || (numExamples < minibatchSize && !finalExport)) {
            //No op
            return new Pair<>(countBefore, Collections.emptyList());
        }

        List<String> exportPaths = new ArrayList<>();

        int countAfter = countBefore;

        //Batch the required number together
        int countSoFar = 0;
        List<MultiDataSet> tempToMerge = new ArrayList<>();
        while (!tempList.isEmpty() && countSoFar != minibatchSize) {
            MultiDataSet next = tempList.removeFirst();
            if (countSoFar + next.getFeatures(0).size(0) <= minibatchSize) {
                //Add the entire MultiDataSet object
                tempToMerge.add(next);
                countSoFar += next.getFeatures(0).size(0);
            } else {
                //Split the MultiDataSet into individual examples
                List<MultiDataSet> examples = next.asList();
                for (MultiDataSet ds : examples) {
                    tempList.addFirst(ds);
                }
            }
        }
        //At this point: we should have the required number of examples in tempToMerge (unless it's a final export)
        MultiDataSet toExport = org.nd4j.linalg.dataset.MultiDataSet.merge(tempToMerge);
        exportPaths.add(export(toExport, partitionIdx, countAfter++));

        return new Pair<>(countAfter, exportPaths);
    }

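    /**
     * Saves a single batched MultiDataSet as {@code mds_<partitionIdx><jvmuid>_<outputCount>.bin}
     * under the export base directory via the Hadoop FileSystem API, and returns the file's URI.
     */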
    private String export(MultiDataSet dataSet, int partitionIdx, int outputCount) throws Exception {
        String filename = "mds_" + partitionIdx + jvmuid + "_" + outputCount + ".bin";

        URI uri = new URI(exportBaseDirectory
                        + (exportBaseDirectory.endsWith("/") || exportBaseDirectory.endsWith("\\") ? "" : "/")
                        + filename);

        Configuration c = conf == null ? DefaultHadoopConfig.get() : conf.getValue().getConfiguration();

        FileSystem file = FileSystem.get(uri, c);
        try (FSDataOutputStream out = file.create(new Path(uri))) {
            dataSet.save(out);
        }

        return uri.toString();
    }
}
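
For reference, a minimal usage sketch follows. The Spark setup, the loadData helper, the minibatch size of 32, and the hdfs:///tmp/mds-export/ path are illustrative assumptions rather than part of the class above:

import java.util.List;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.deeplearning4j.spark.data.BatchAndExportMultiDataSetsFunction;
import org.nd4j.linalg.dataset.api.MultiDataSet;

public class BatchAndExportMultiDataSetsExample {
    public static void main(String[] args) {
        JavaSparkContext sc = new JavaSparkContext(
                        new SparkConf().setAppName("mds-batch-and-export"));

        //Hypothetical input: an RDD of MultiDataSets with varying example counts
        JavaRDD<MultiDataSet> data = loadData(sc);

        //Re-batch to minibatches of 32 examples; each batch is written under the
        //export directory, and the returned RDD contains the paths of the files
        JavaRDD<String> paths = data.mapPartitionsWithIndex(
                        new BatchAndExportMultiDataSetsFunction(32, "hdfs:///tmp/mds-export/"),
                        true);

        for (String path : paths.collect()) {
            System.out.println(path);
        }

        sc.stop();
    }

    private static JavaRDD<MultiDataSet> loadData(JavaSparkContext sc) {
        //Placeholder: construct or load the MultiDataSets for the pipeline here
        throw new UnsupportedOperationException("illustrative placeholder");
    }
}

Where a non-default Hadoop configuration is needed, the three-argument constructor accepts a Broadcast<SerializableHadoopConfig>. Judging by the getConfiguration() accessor used in export(), wrapping a Hadoop Configuration in a SerializableHadoopConfig and broadcasting it with sc.broadcast(...) should fit, but confirm against the datavec API.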