/*
 * Copyright © 2016 Cask Data, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */

package co.cask.cdap.etl.batch.spark;

import co.cask.cdap.api.data.DatasetContext;
import co.cask.cdap.api.data.DatasetInstantiationException;
import co.cask.cdap.api.data.batch.Split;
import co.cask.cdap.api.dataset.Dataset;
import co.cask.cdap.api.flow.flowlet.StreamEvent;
import co.cask.cdap.api.plugin.PluginContext;
import co.cask.cdap.api.spark.JavaSparkExecutionContext;
import co.cask.cdap.api.stream.StreamEventDecoder;
import co.cask.cdap.etl.api.Lookup;
import co.cask.cdap.etl.api.batch.SparkExecutionPluginContext;
import co.cask.cdap.etl.common.AbstractTransformContext;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;

import java.util.Map;
import javax.annotation.Nullable;

/**
 * An implementation of {@link SparkExecutionPluginContext} that delegates to a {@link JavaSparkExecutionContext}.
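 *
 * <p>A minimal usage sketch: inside a {@code SparkSink}-style plugin, this context can read a
 * key/value dataset into a pair RDD and write results back out. The {@code run} method and the
 * dataset names below are illustrative only and are not part of this class:
 *
 * <pre>{@code
 * public void run(SparkExecutionPluginContext context, JavaRDD<StructuredRecord> input) throws Exception {
 *   // Read an existing key/value dataset into a pair RDD via the delegating context.
 *   JavaPairRDD<byte[], byte[]> events = context.fromDataset("events");
 *   // ... transform as needed, then persist the pairs to another dataset.
 *   context.saveAsDataset(events, "events.copy");
 * }
 * }</pre>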
 */
public class BasicSparkExecutionPluginContext extends AbstractTransformContext implements SparkExecutionPluginContext {

  private final JavaSparkExecutionContext sec;
  private final JavaSparkContext jsc;
  private final DatasetContext datasetContext;

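  /**
   * Creates a context for the given stage, delegating plugin access and metrics to the supplied
   * {@link JavaSparkExecutionContext} and dataset access to the supplied {@link DatasetContext}.
   * The parent's lookup provider is null because lookups are not supported in Spark
   * (see {@link #provide(String, Map)}).
   */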
  public BasicSparkExecutionPluginContext(JavaSparkExecutionContext sec,
                                          JavaSparkContext jsc, DatasetContext datasetContext,
                                          String stageName) {
    super(sec.getPluginContext(), sec.getMetrics(), null, stageName);
    this.sec = sec;
    this.jsc = jsc;
    this.datasetContext = datasetContext;
  }

  @Override
  public long getLogicalStartTime() {
    return sec.getLogicalStartTime();
  }

  @Override
  public Map<String, String> getRuntimeArguments() {
    return sec.getRuntimeArguments();
  }

  @Override
  public <K, V> JavaPairRDD<K, V> fromDataset(String datasetName) {
    return sec.fromDataset(datasetName);
  }

  @Override
  public <K, V> JavaPairRDD<K, V> fromDataset(String datasetName, Map<String, String> arguments) {
    return sec.fromDataset(datasetName, arguments);
  }

  @Override
  public <K, V> JavaPairRDD<K, V> fromDataset(String datasetName, Map<String, String> arguments,
                                              @Nullable Iterable<? extends Split> splits) {
    return sec.fromDataset(datasetName, arguments, splits);
  }

  @Override
  public JavaRDD<StreamEvent> fromStream(String streamName) {
    return sec.fromStream(streamName);
  }

  @Override
  public JavaRDD<StreamEvent> fromStream(String streamName, long startTime, long endTime) {
    return sec.fromStream(streamName, startTime, endTime);
  }

  @Override
  public <V> JavaPairRDD<Long, V> fromStream(String streamName, Class<V> valueType) {
    return sec.fromStream(streamName, valueType);
  }

  @Override
  public <V> JavaPairRDD<Long, V> fromStream(String streamName, long startTime, long endTime, Class<V> valueType) {
    return sec.fromStream(streamName, startTime, endTime, valueType);
  }

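  // Decodes raw stream events into key/value pairs using the supplied StreamEventDecoder.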
  @Override
  public <K, V> JavaPairRDD<K, V> fromStream(String streamName, long startTime, long endTime,
                                             Class<? extends StreamEventDecoder<K, V>> decoderClass,
                                             Class<K> keyType, Class<V> valueType) {
    return sec.fromStream(streamName, startTime, endTime, decoderClass, keyType, valueType);
  }

  @Override
  public <K, V> void saveAsDataset(JavaPairRDD<K, V> rdd, String datasetName) {
    sec.saveAsDataset(rdd, datasetName);
  }

  @Override
  public <K, V> void saveAsDataset(JavaPairRDD<K, V> rdd, String datasetName, Map<String, String> arguments) {
    sec.saveAsDataset(rdd, datasetName, arguments);
  }

  @Override
  public JavaSparkContext getSparkContext() {
    return jsc;
  }

  @Override
  public PluginContext getPluginContext() {
    return sec.getPluginContext();
  }

  @Override
  public <T extends Dataset> T getDataset(String name) throws DatasetInstantiationException {
    return datasetContext.getDataset(name);
  }

  @Override
  public <T extends Dataset> T getDataset(String name,
                                          Map<String, String> arguments) throws DatasetInstantiationException {
    return datasetContext.getDataset(name, arguments);
  }

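  // Dataset lifecycle calls below are delegated directly to the underlying DatasetContext.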
  @Override
  public void releaseDataset(Dataset dataset) {
    datasetContext.releaseDataset(dataset);
  }

  @Override
  public void discardDataset(Dataset dataset) {
    datasetContext.discardDataset(dataset);
  }

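  /**
   * Table lookups are not supported in the Spark execution environment, so this always throws an
   * {@link UnsupportedOperationException}. This is also why the constructor passes a null lookup
   * provider to the parent class.
   */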
  @Override
  public <T> Lookup<T> provide(String table, Map<String, String> arguments) {
    throw new UnsupportedOperationException("Lookup not supported in Spark");
  }
}