/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.hudi.data;

import org.apache.hudi.client.common.HoodieSparkEngineContext;
import org.apache.hudi.common.data.HoodieData;
import org.apache.hudi.common.data.HoodiePairData;
import org.apache.hudi.common.engine.HoodieEngineContext;
import org.apache.hudi.common.function.SerializableFunction;
import org.apache.hudi.common.function.SerializablePairFunction;
import org.apache.hudi.common.util.collection.MappingIterator;
import org.apache.hudi.common.util.collection.Pair;

import org.apache.spark.Partitioner;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.Optional;
import org.apache.spark.sql.internal.SQLConf;
import org.apache.spark.storage.StorageLevel;

import java.util.Iterator;
import java.util.List;

import scala.Tuple2;

/**
 * Holds a {@link JavaRDD} of objects.
 *
 * @param <T> type of object.
 */
public class HoodieJavaRDD<T> implements HoodieData<T> {

  private final JavaRDD<T> rddData;

  private HoodieJavaRDD(JavaRDD<T> rddData) {
    this.rddData = rddData;
  }

  /**
   * @param rddData a {@link JavaRDD} of objects in type T.
   * @param <T>     type of object.
   * @return a new instance containing the {@link JavaRDD} reference.
   */
  public static <T> HoodieJavaRDD<T> of(JavaRDD<T> rddData) {
    return new HoodieJavaRDD<>(rddData);
  }
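
  // Usage sketch (not part of the Hudi sources; assumes a live
  // JavaSparkContext named "jsc"):
  //   JavaRDD<String> rdd = jsc.parallelize(Arrays.asList("a", "b", "c"));
  //   HoodieData<String> data = HoodieJavaRDD.of(rdd);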

  /**
   * @param data        a {@link List} of objects in type T.
   * @param context     {@link HoodieSparkEngineContext} to use.
   * @param parallelism parallelism for the {@link JavaRDD}.
   * @param <T>         type of object.
   * @return a new instance containing the {@link JavaRDD} instance.
   */
  public static <T> HoodieJavaRDD<T> of(
      List<T> data, HoodieSparkEngineContext context, int parallelism) {
    return new HoodieJavaRDD<>(context.getJavaSparkContext().parallelize(data, parallelism));
  }
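
  // Usage sketch (hypothetical; assumes an existing HoodieSparkEngineContext
  // named "context"):
  //   HoodieData<String> data =
  //       HoodieJavaRDD.of(Arrays.asList("a", "b", "c"), context, 2);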

  /**
   * @param hoodieData {@link HoodieJavaRDD} instance containing the {@link JavaRDD} of objects.
   * @param <T>        type of object.
   * @return a {@link JavaRDD} of objects in type T.
   */
  public static <T> JavaRDD<T> getJavaRDD(HoodieData<T> hoodieData) {
    return ((HoodieJavaRDD<T>) hoodieData).rddData;
  }

  public static <K, V> JavaPairRDD<K, V> getJavaRDD(HoodiePairData<K, V> hoodieData) {
    return ((HoodieJavaPairRDD<K, V>) hoodieData).get();
  }
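
  // Usage sketch: unwrap the Hudi abstraction when plain Spark APIs are
  // needed (hypothetical "data" from the examples above):
  //   JavaRDD<String> rdd = HoodieJavaRDD.getJavaRDD(data);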

  @Override
  public int getId() {
    return rddData.id();
  }

  @Override
  public void persist(String level) {
    rddData.persist(StorageLevel.fromString(level));
  }

  @Override
  public void persist(String level, HoodieEngineContext engineContext, HoodieDataCacheKey cacheKey) {
    engineContext.putCachedDataIds(cacheKey, this.getId());
    rddData.persist(StorageLevel.fromString(level));
  }

  @Override
  public void unpersist() {
    rddData.unpersist();
  }
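
  // Usage sketch: cache the backing RDD with any storage level name accepted
  // by StorageLevel.fromString, then release it once downstream actions finish.
  //   data.persist("MEMORY_AND_DISK");
  //   long total = data.count();
  //   data.unpersist();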

  @Override
  public boolean isEmpty() {
    return rddData.isEmpty();
  }

  @Override
  public long count() {
    return rddData.count();
  }

  @Override
  public int getNumPartitions() {
    return rddData.getNumPartitions();
  }

  @Override
  public int deduceNumPartitions() {
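    // Deduce a sensible parallelism: prefer the partitioner's partition count,
    // then spark.sql.shuffle.partitions, then spark.default.parallelism, and
    // finally this RDD's current partition count.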
    // for source rdd, the partitioner is None
    final Optional<Partitioner> partitioner = rddData.partitioner();
    if (partitioner.isPresent()) {
      int partPartitions = partitioner.get().numPartitions();
      if (partPartitions > 0) {
        return partPartitions;
      }
    }

    if (SQLConf.get().contains(SQLConf.SHUFFLE_PARTITIONS().key())) {
      return Integer.parseInt(SQLConf.get().getConfString(SQLConf.SHUFFLE_PARTITIONS().key()));
    } else if (rddData.context().conf().contains("spark.default.parallelism")) {
      return rddData.context().defaultParallelism();
    } else {
      return rddData.getNumPartitions();
    }
  }

  @Override
  public <O> HoodieData<O> map(SerializableFunction<T, O> func) {
    return HoodieJavaRDD.of(rddData.map(func::apply));
  }
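
  // Usage sketch (hypothetical "records" holding HoodieData<String>):
  //   HoodieData<Integer> lengths = records.map(String::length);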

  @Override
  public <O> HoodieData<O> mapPartitions(SerializableFunction<Iterator<T>, Iterator<O>> func, boolean preservesPartitioning) {
    return HoodieJavaRDD.of(rddData.mapPartitions(func::apply, preservesPartitioning));
  }

  @Override
  public <O> HoodieData<O> flatMap(SerializableFunction<T, Iterator<O>> func) {
    // NOTE: Unrolling this lambda into a method reference results in [[ClassCastException]]
    //       due to weird interop b/w Scala and Java
    return HoodieJavaRDD.of(rddData.flatMap(e -> func.apply(e)));
  }
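
  // Usage sketch: expand each record into several outputs (hypothetical
  // "lines" holding HoodieData<String>):
  //   HoodieData<String> words =
  //       lines.flatMap(line -> Arrays.asList(line.split(" ")).iterator());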

  @Override
  public <K, V> HoodiePairData<K, V> flatMapToPair(SerializableFunction<T, Iterator<? extends Pair<K, V>>> func) {
    return HoodieJavaPairRDD.of(
        rddData.flatMapToPair(e ->
            new MappingIterator<>(func.apply(e), p -> new Tuple2<>(p.getKey(), p.getValue()))));
  }

  @Override
  public <K, V> HoodiePairData<K, V> mapToPair(SerializablePairFunction<T, K, V> func) {
    return HoodieJavaPairRDD.of(rddData.mapToPair(input -> {
      Pair<K, V> pair = func.call(input);
      return new Tuple2<>(pair.getLeft(), pair.getRight());
    }));
  }
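
  // Usage sketch: key each record by a derived value (hypothetical names):
  //   HoodiePairData<Integer, String> byLength =
  //       records.mapToPair(r -> Pair.of(r.length(), r));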

  @Override
  public HoodieData<T> distinct() {
    return HoodieJavaRDD.of(rddData.distinct());
  }

  @Override
  public HoodieData<T> distinct(int parallelism) {
    return HoodieJavaRDD.of(rddData.distinct(parallelism));
  }

  @Override
  public HoodieData<T> filter(SerializableFunction<T, Boolean> filterFunc) {
    return HoodieJavaRDD.of(rddData.filter(filterFunc::apply));
  }

  @Override
  public HoodieData<T> union(HoodieData<T> other) {
    return HoodieJavaRDD.of(rddData.union(((HoodieJavaRDD<T>) other).rddData));
  }

  @Override
  public List<T> collectAsList() {
    return rddData.collect();
  }

  @Override
  public HoodieData<T> repartition(int parallelism) {
    return HoodieJavaRDD.of(rddData.repartition(parallelism));
  }
}
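
// End-to-end usage sketch (a minimal illustration, not part of the Hudi
// sources; assumes a local Spark master and the hypothetical names shown):
//
//   JavaSparkContext jsc = new JavaSparkContext("local[2]", "hoodie-data-demo");
//   HoodieSparkEngineContext context = new HoodieSparkEngineContext(jsc);
//   HoodieData<String> data =
//       HoodieJavaRDD.of(Arrays.asList("a", "bb", "ccc"), context, 2);
//   List<Integer> lengths = data.map(String::length).collectAsList();  // [1, 2, 3]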



