/*
* Copyright © 2015-2019 Cask Data, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package io.cdap.plugin.batch.source;

import com.google.common.base.Preconditions;
import com.google.common.collect.Maps;
import io.cdap.cdap.api.data.batch.Input;
import io.cdap.cdap.api.dataset.DatasetManagementException;
import io.cdap.cdap.api.dataset.DatasetProperties;
import io.cdap.cdap.etl.api.PipelineConfigurer;
import io.cdap.cdap.etl.api.batch.BatchSource;
import io.cdap.cdap.etl.api.batch.BatchSourceContext;
import io.cdap.plugin.common.BatchReadableWritableConfig;
import io.cdap.plugin.common.Properties;
import java.util.Map;

/**
 * An abstract source for CDAP BatchReadable Datasets. Extending classes must provide an implementation of
 * {@link BatchReadableSource#getProperties()}, which should return the properties used by the source.
 *
 * @param <KEY_IN> the type of input key from the Batch job
 * @param <VAL_IN> the type of input value from the Batch job
 * @param <OUT> the type of output for the source
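 *
 * <p>For example, a subclass reading from a CDAP {@code Table} could look like the sketch below.
 * This is illustrative only: {@code MyTableSource} and {@code MyTableConfig} are hypothetical names,
 * not part of this codebase, and {@code MyTableConfig} is assumed to extend
 * {@code BatchReadableWritableConfig}.</p>
 *
 * <pre>{@code
 * public class MyTableSource extends BatchReadableSource<byte[], Row, StructuredRecord> {
 *   private final MyTableConfig config;
 *
 *   public MyTableSource(MyTableConfig config) {
 *     super(config);
 *     this.config = config;
 *   }
 *
 *   protected Map<String, String> getProperties() {
 *     Map<String, String> properties = new HashMap<>();
 *     properties.put(Properties.BatchReadableWritable.NAME, config.getName());
 *     properties.put(Properties.BatchReadableWritable.TYPE, Table.class.getName());
 *     return properties;
 *   }
 * }
 * }</pre>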
*/
public abstract class BatchReadableSource<KEY_IN, VAL_IN, OUT> extends BatchSource<KEY_IN, VAL_IN, OUT> {
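  // Config carrying the dataset name; the name may contain a macro that is only resolved at runtime.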
  private final BatchReadableWritableConfig batchReadableWritableConfig;

  protected BatchReadableSource(BatchReadableWritableConfig batchReadableWritableConfig) {
    this.batchReadableWritableConfig = batchReadableWritableConfig;
  }

@Override
public void configurePipeline(PipelineConfigurer pipelineConfigurer) {
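    // If the dataset name is a macro, it cannot be validated or created yet; that is deferred to prepareRun.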
if (!batchReadableWritableConfig.containsMacro(Properties.BatchReadableWritable.NAME)) {
String datasetName = batchReadableWritableConfig.getName();
Preconditions.checkArgument(datasetName != null && !datasetName.isEmpty(), "Dataset name must be given.");
String datasetType = getProperties().get(Properties.BatchReadableWritable.TYPE);
Preconditions.checkArgument(datasetType != null && !datasetType.isEmpty(), "Dataset type must be given.");
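      // Copy the properties and strip the name and type entries; everything left over is passed
      // through as dataset creation properties.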
      Map<String, String> properties = Maps.newHashMap(getProperties());
properties.remove(Properties.BatchReadableWritable.NAME);
properties.remove(Properties.BatchReadableWritable.TYPE);
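      // Create the dataset at configure time unless a subclass opts out.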
if (!shouldSkipCreateAtConfigure()) {
pipelineConfigurer.createDataset(datasetName, datasetType,
DatasetProperties.builder().addAll(properties).build());
}
}
}
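
  /**
   * Returns whether dataset creation should be skipped at configure time. The default is {@code false};
   * subclasses can override this to defer creation entirely to {@link #prepareRun(BatchSourceContext)}.
   */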
protected boolean shouldSkipCreateAtConfigure() {
return false;
}

  /**
   * Returns the dataset properties used by this source. Subclasses must implement this to supply at
   * least the dataset name and type entries; any remaining entries are used as dataset creation
   * properties.
   */
  protected abstract Map<String, String> getProperties();

@Override
public void prepareRun(BatchSourceContext context) throws DatasetManagementException {
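    // Any macros in the config have been evaluated by the time prepareRun is called,
    // so the resolved dataset name and type are available here.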
    Map<String, String> properties = getProperties();
// if macros were provided at runtime, dataset needs to be created now
if (!context.datasetExists(properties.get(Properties.BatchReadableWritable.NAME))) {
context.createDataset(properties.get(Properties.BatchReadableWritable.NAME),
properties.get(Properties.BatchReadableWritable.TYPE),
DatasetProperties.builder().addAll(properties).build());
}
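    // Register the dataset as the input for this batch run.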
context.setInput(Input.ofDataset(properties.get(Properties.BatchReadableWritable.NAME)));
}
}