org.datacleaner.spark.functions.ValuesToInputRowFunction
/**
 * DataCleaner (community edition)
 * Copyright (C) 2014 Neopost - Customer Information Management
 *
 * This copyrighted material is made available to anyone wishing to use, modify,
 * copy, or redistribute it subject to the terms and conditions of the GNU
 * Lesser General Public License, as published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
 * or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License
 * for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with this distribution; if not, write to:
 * Free Software Foundation, Inc.
 * 51 Franklin Street, Fifth Floor
 * Boston, MA  02110-1301  USA
 */
package org.datacleaner.spark.functions;

import java.util.List;

import org.apache.metamodel.schema.Table;
import org.apache.spark.api.java.function.Function;
import org.datacleaner.api.InputColumn;
import org.datacleaner.api.InputRow;
import org.datacleaner.data.MockInputRow;
import org.datacleaner.spark.SparkJobContext;

import scala.Tuple2;

/**
 * Mapper function that turns Object arrays representing physical
 * records into the format of an {@link InputRow}.
 *
 * Currently this is implemented strictly: values are looked up by the
 * physical column indices of the job's source columns.
 *
 * It is assumed that the job is based on a single source {@link Table}.
 */
public class ValuesToInputRowFunction implements Function<Tuple2<Object[], Long>, InputRow> {

    private static final long serialVersionUID = 1L;

    private final SparkJobContext _sparkJobContext;

    public ValuesToInputRowFunction(final SparkJobContext sparkJobContext) {
        _sparkJobContext = sparkJobContext;
    }

    @Override
    public InputRow call(final Tuple2<Object[], Long> tuple) throws Exception {
        final Object[] values = tuple._1;
        final Long rowNumber = tuple._2;

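        // Despite its name, MockInputRow doubles here as a simple InputRow
        // implementation; note that the row number is narrowed from long to int.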
        final MockInputRow inputRow = new MockInputRow(rowNumber.intValue());
        final List<InputColumn<?>> sourceColumns = _sparkJobContext.getAnalysisJob().getSourceColumns();
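        // Look up each source column's value by its physical column index in
        // the record's value array (a single source table is assumed).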
        for (final InputColumn<?> sourceColumn : sourceColumns) {
            assert sourceColumn.isPhysicalColumn();
            final int columnIndex = sourceColumn.getPhysicalColumn().getColumnNumber();
            final Object value = values[columnIndex];
            inputRow.put(sourceColumn, value);
        }
        return inputRow;
    }

}
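The Tuple2<Object[], Long> parameter shape is exactly what JavaRDD.zipWithIndex() produces, which suggests how a driver program applies this function. Below is a minimal, hypothetical sketch; the records RDD and the helper name toInputRows are illustrative assumptions, not part of DataCleaner's actual Spark runner:

import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.datacleaner.api.InputRow;
import org.datacleaner.spark.SparkJobContext;
import org.datacleaner.spark.functions.ValuesToInputRowFunction;

public class ValuesToInputRowExample {

    // Hypothetical helper: turns an RDD of raw value arrays into InputRows.
    public static JavaRDD<InputRow> toInputRows(final JavaRDD<Object[]> records,
            final SparkJobContext sparkJobContext) {
        // zipWithIndex assigns each record a 0-based row number, yielding
        // the Tuple2<Object[], Long> pairs the function expects.
        final JavaPairRDD<Object[], Long> indexed = records.zipWithIndex();
        return indexed.map(new ValuesToInputRowFunction(sparkJobContext));
    }
}

Since Spark serializes the function instance for shipment to executors, the captured SparkJobContext field must itself be serializable.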