org.apache.flink.table.planner.Conversions.scala
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.table.planner

import org.apache.flink.api.common.functions.MapFunction
import org.apache.flink.api.common.typeinfo.TypeInformation
import org.apache.flink.api.java.typeutils.{GenericTypeInfo, PojoTypeInfo, TupleTypeInfoBase}
import org.apache.flink.table.api.{TableConfig, TableException, TableSchema}
import org.apache.flink.table.codegen.{FunctionCodeGenerator, GeneratedFunction}
import org.apache.flink.table.typeutils.TimeIndicatorTypeInfo
import org.apache.flink.types.Row


object Conversions {

  /**
    * Utility method for generating a converter [[MapFunction]] that converts from a
    * given input [[TypeInformation]] of type [[Row]] to the requested type, based on the
    * logical [[TableSchema]] of the input type. Returns [[None]] if the requested type
    * is a generic [[Row]] and therefore needs no conversion.
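    *
    * Example call (illustrative sketch; `rowTypeInfo`, `schema`, `pojoTypeInfo`,
    * `tableConfig` and the `MyPojo` class are assumed to exist at the call site):
    * {{{
    *   val generated: Option[GeneratedFunction[MapFunction[Row, MyPojo], MyPojo]] =
    *     Conversions.generateRowConverterFunction[MyPojo](
    *       physicalInputType = rowTypeInfo,    // TypeInformation[Row] of the physical rows
    *       logicalInputSchema = schema,        // logical TableSchema of those rows
    *       requestedOutputType = pojoTypeInfo, // e.g. a PojoTypeInfo[MyPojo]
    *       functionName = "RowToMyPojoConverter",
    *       config = tableConfig)
    * }}}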
    */
  def generateRowConverterFunction[OUT](
    physicalInputType: TypeInformation[Row],
    logicalInputSchema: TableSchema,
    requestedOutputType: TypeInformation[OUT],
    functionName: String,
    config: TableConfig)
  : Option[GeneratedFunction[MapFunction[Row, OUT], OUT]] = {

    // validate that at least the field types of the physical and logical row types match;
    // we do this here to make sure that the plan translation was correct
    val typeInfo = logicalInputSchema.toRowType
    if (typeInfo != physicalInputType) {
      throw new TableException(
        s"The field types of physical and logical row types do not match. " +
          s"Physical type is [$typeInfo], Logical type is [$physicalInputType]. " +
          s"This is a bug and should not happen. Please file an issue.")
    }

    // generic row needs no conversion
    if (requestedOutputType.isInstanceOf[GenericTypeInfo[_]] &&
      requestedOutputType.getTypeClass == classOf[Row]) {
      return None
    }

    val fieldTypes = logicalInputSchema.getFieldTypes
    val fieldNames = logicalInputSchema.getFieldNames

    // check for valid type info
    if (requestedOutputType.getArity != fieldTypes.length) {
      throw new TableException(
        s"Arity [${fieldTypes.length}] of result [$fieldTypes] does not match " +
          s"the number[${requestedOutputType.getArity}] of requested type [$requestedOutputType].")
    }

    // check requested types

    def validateFieldType(fieldType: TypeInformation[_]): Unit = fieldType match {
      case _: TimeIndicatorTypeInfo =>
        throw new TableException("The time indicator type is an internal type only.")
      case _ => // ok
    }

    requestedOutputType match {
      // POJO type requested
      case pt: PojoTypeInfo[_] =>
        fieldNames.zip(fieldTypes) foreach {
          case (fName, fType) =>
            val pojoIdx = pt.getFieldIndex(fName)
            if (pojoIdx < 0) {
              throw new TableException(s"POJO does not define field name: $fName")
            }
            val requestedTypeInfo = pt.getTypeAt(pojoIdx)
            validateFieldType(requestedTypeInfo)
            if (fType != requestedTypeInfo) {
              throw new TableException(s"Result field does not match requested type. " +
                s"Requested: $requestedTypeInfo; Actual: $fType")
            }
        }

      // Tuple/Case class/Row type requested
      case tt: TupleTypeInfoBase[_] =>
        fieldTypes.zipWithIndex foreach {
          case (fieldTypeInfo, i) =>
            val requestedTypeInfo = tt.getTypeAt(i)
            validateFieldType(requestedTypeInfo)
            if (fieldTypeInfo != requestedTypeInfo) {
              throw new TableException(s"Result field does not match requested type. " +
                s"Requested: $requestedTypeInfo; Actual: $fieldTypeInfo")
            }
        }

      // atomic type requested
      case t: TypeInformation[_] =>
        if (fieldTypes.size != 1) {
          throw new TableException(s"Requested result type is an atomic type but " +
            s"result[$fieldTypes] has more or less than a single field.")
        }
        val requestedTypeInfo = fieldTypes.head
        validateFieldType(requestedTypeInfo)
        if (requestedTypeInfo != t) {
          throw new TableException(s"Result field does not match requested type. " +
            s"Requested: $t; Actual: $requestedTypeInfo")
        }

      case _ =>
        throw new TableException(s"Unsupported result type: $requestedOutputType")
    }

    // code-generate the converter MapFunction
    val generator = new FunctionCodeGenerator(
      config,
      false, // nullableInput
      physicalInputType,
      None,
      None)

    val conversion = generator.generateConverterResultExpression(
      requestedOutputType,
      fieldNames)

    val body =
      s"""
         |${conversion.code}
         |return ${conversion.resultTerm};
         |""".stripMargin

    val generated = generator.generateFunction(
      functionName,
      classOf[MapFunction[Row, OUT]],
      body,
      requestedOutputType)

    Some(generated)
  }
}
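
/**
  * Illustrative consumption sketch: shows how a caller might handle the [[Option]]
  * returned by [[Conversions.generateRowConverterFunction]]. [[None]] means the
  * physical [[Row]] can be forwarded unchanged; [[Some]] carries the name, code and
  * return type of a code-generated [[MapFunction]] that a planner-specific runner
  * still has to compile and instantiate (that compile step is not shown here).
  * This helper and its name are illustrative additions, not part of the planner API.
  */
private object ConversionsUsageSketch {

  /** Summarizes what the optional converter would do, without compiling it. */
  def describe[OUT](
      converter: Option[GeneratedFunction[MapFunction[Row, OUT], OUT]]): String =
    converter match {
      case None =>
        // a generic Row was requested: rows can be emitted as-is
        "no conversion needed"
      case Some(f) =>
        // f.name, f.code and f.returnType are what the runtime wraps into a MapFunction
        s"generated converter '${f.name}' producing type [${f.returnType}]"
    }
}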



