com.alibaba.hologres.spark.sink.BaseHoloDataWriter.scala

package com.alibaba.hologres.spark.sink

import com.alibaba.hologres.client.exception.{HoloClientException, HoloClientWithDetailsException}
import com.alibaba.hologres.client.model.{Record, TableSchema}
import com.alibaba.hologres.client.{HoloClient, Put}
import com.alibaba.hologres.spark.config.HologresConfigs
import com.alibaba.hologres.spark.exception.SparkHoloException
import org.apache.spark.internal.Logging
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.types._
import org.slf4j.LoggerFactory

/** BaseHoloDataWriter: shared conversion, write, commit and abort logic for Hologres data writers. */
abstract class BaseHoloDataWriter(
                                   hologresConfigs: HologresConfigs,
                                   sparkSchema: StructType,
                                   holoSchema: TableSchema) extends Logging {
  private val logger = LoggerFactory.getLogger(getClass)

  logger.debug("create new holo client")
  private val client: HoloClient = new HoloClient(hologresConfigs.holoConfig)

  private val recordLength: Int = sparkSchema.fields.length
  // Maps each Spark column index to the corresponding Hologres column index.
  private val columnIdToHoloId: Array[Int] = new Array[Int](recordLength)
  // One FieldWriter per Spark column, converting Spark internal values to Hologres column values.
  private val fieldWriters: Array[FieldWriter] = {
    val fieldWriters = new Array[FieldWriter](recordLength)
    for (i <- 0 until recordLength) {
      val holoColumnIndex = holoSchema.getColumnIndex(sparkSchema.fields(i).name)
      columnIdToHoloId(i) = holoColumnIndex
      fieldWriters(i) = FieldWriterUtils.createFieldWriter(holoSchema.getColumn(holoColumnIndex))
    }
    fieldWriters
  }

  /** Flushes all buffered records to Hologres; failed records are logged before the exception is rethrown. */
  def commit(): Null = {
    logger.debug("Commit....")
    try {
      client.flush()
    }
    catch {
      case e: HoloClientWithDetailsException =>
        // A HoloClientWithDetailsException carries every failed record; log each one with its cause before rethrowing.
        var i = 0
        while (i < e.size) {
          val failedRecord: Record = e.getFailRecord(i)
          val cause: HoloClientException = e.getException(i)
          i += 1
          logger.error(s"Upsert data $failedRecord failed, caused by $cause")
        }
        throw new SparkHoloException(e)
      case e: HoloClientException =>
        throw new SparkHoloException(e)
    }
    null
  }

  /** Converts a Spark InternalRow into a Hologres Put and submits it to the client. */
  def write(row: InternalRow): Unit = {
    if (null == row) {
      return
    }
    try {
      val put: Put = new Put(holoSchema)
      convertRowToHologresRecord(put, row)
      client.put(put)
    } catch {
      case e: HoloClientWithDetailsException =>
        var i = 0
        while (i < e.size) {
          val failedRecord: Record = e.getFailRecord(i)
          val cause: HoloClientException = e.getException(i)
          i += 1
          logger.error(s"Upsert data $failedRecord failed, caused by $cause")
        }
        throw new SparkHoloException(e)
      case e: HoloClientException =>
        throw new SparkHoloException(e)
    }
  }

  /** Aborts the write task and releases the client. */
  def abort(): Unit = {
    logger.debug("Abort....")
    close()
  }

  /** Copies each Spark column value into the Put, writing explicit nulls for null columns. */
  private def convertRowToHologresRecord(put: Put, row: InternalRow): Unit = {
    try {
      for (i <- 0 until recordLength) {
        val columnHoloId = columnIdToHoloId(i)
        if (!row.isNullAt(i)) {
          put.setObject(columnHoloId, fieldWriters.apply(i).writeValue(row, i))
        } else {
          put.setObject(columnHoloId, null)
        }
      }
    } catch {
      case e: Exception =>
        // Log the data row whose conversion failed.
        logger.error(s"convert spark InternalRow to Hologres Record failed, InternalRow $row, record ${put.getRecord}")
        throw new SparkHoloException(e)
    }
  }

  /** Flushes any remaining buffered records and closes the Hologres client. */
  protected def close(): Unit = {
    if (client != null) {
      try {
        client.flush()
      } catch {
        case e: HoloClientException =>
          throw new SparkHoloException(e)
      } finally {
        client.close()
      }
    }
    logger.debug("Close....")
  }
}
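
For orientation, the sketch below shows one way a concrete per-task writer could mix this base class into Spark 3.x's DataSourceV2 write path (org.apache.spark.sql.connector.write.DataWriter). The subclass name ExampleHoloDataWriter and its wiring are illustrative assumptions, not the connector's actual implementation; only BaseHoloDataWriter and its collaborators come from the listing above.

import com.alibaba.hologres.client.model.TableSchema
import com.alibaba.hologres.spark.config.HologresConfigs
import com.alibaba.hologres.spark.sink.BaseHoloDataWriter
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.connector.write.DataWriter
import org.apache.spark.sql.types.StructType

/** Hypothetical concrete writer: one instance per Spark write task. */
class ExampleHoloDataWriter(
                             hologresConfigs: HologresConfigs,
                             sparkSchema: StructType,
                             holoSchema: TableSchema)
  extends BaseHoloDataWriter(hologresConfigs, sparkSchema, holoSchema)
    with DataWriter[InternalRow] {

  // write(row), commit() and abort() are inherited from BaseHoloDataWriter.
  // commit(): Null satisfies DataWriter.commit(): WriterCommitMessage because Null
  // is a subtype of every reference type, so this task reports a null commit message.

  // DataWriter extends Closeable, so the protected close() must be widened to public.
  override def close(): Unit = super.close()
}

Read this way, the Null return type of commit() lets the base class satisfy the DataWriter contract directly while leaving the commit message empty.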



