org.apache.flink.table.planner.plan.nodes.physical.stream.StreamPhysicalSink.scala Maven / Gradle / Ivy
Go to download
Show more of this group Show more artifacts with this name
Show all versions of flink-table-planner-blink_2.11 Show documentation
This module bridges Table/SQL API and runtime. It contains
all resources that are required during pre-flight and runtime
phase. The content of this module is work-in-progress. It will
replace flink-table-planner once it is stable. See FLINK-11439
and FLIP-32 for more details.
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.plan.nodes.physical.stream
import org.apache.flink.table.catalog.{ObjectIdentifier, ResolvedCatalogTable}
import org.apache.flink.table.connector.sink.DynamicTableSink
import org.apache.flink.table.planner.calcite.FlinkTypeFactory
import org.apache.flink.table.planner.plan.abilities.sink.SinkAbilitySpec
import org.apache.flink.table.planner.plan.nodes.calcite.Sink
import org.apache.flink.table.planner.plan.nodes.exec.spec.DynamicTableSinkSpec
import org.apache.flink.table.planner.plan.nodes.exec.stream.StreamExecSink
import org.apache.flink.table.planner.plan.nodes.exec.{ExecNode, InputProperty}
import org.apache.flink.table.planner.plan.utils.{ChangelogPlanUtils, FlinkRelOptUtil}
import org.apache.calcite.plan.{RelOptCluster, RelTraitSet}
import org.apache.calcite.rel.RelNode
import org.apache.calcite.rel.hint.RelHint
import java.util
/**
 * Stream physical RelNode that writes data into an external sink defined by a
 * [[DynamicTableSink]].
 */
class StreamPhysicalSink(
    cluster: RelOptCluster,
    traitSet: RelTraitSet,
    inputRel: RelNode,
    hints: util.List[RelHint],
    tableIdentifier: ObjectIdentifier,
    catalogTable: ResolvedCatalogTable,
    tableSink: DynamicTableSink,
    abilitySpecs: Array[SinkAbilitySpec])
  extends Sink(cluster, traitSet, inputRel, hints, tableIdentifier, catalogTable, tableSink)
  with StreamPhysicalRel {

  /** A sink node itself never requires watermarks from its input. */
  override def requireWatermark: Boolean = false

  /**
   * Creates a copy of this node with the given trait set and (single) input,
   * carrying over all sink-related metadata unchanged.
   */
  override def copy(traitSet: RelTraitSet, inputs: util.List[RelNode]): RelNode =
    new StreamPhysicalSink(
      cluster,
      traitSet,
      inputs.get(0),
      hints,
      tableIdentifier,
      catalogTable,
      tableSink,
      abilitySpecs)

  /**
   * Translates this physical node into its exec-graph counterpart, a
   * [[StreamExecSink]] parameterized with a [[DynamicTableSinkSpec]] built from
   * this node's identifier, catalog table, ability specs, sink instance and the
   * table configuration taken from the planner context.
   */
  override def translateToExecNode(): ExecNode[_] = {
    // NOTE(review): `.get` assumes the changelog mode has already been inferred
    // for the input by this planning phase — confirm this invariant holds for
    // every plan that reaches translation.
    val changelogMode =
      ChangelogPlanUtils.getChangelogMode(getInput.asInstanceOf[StreamPhysicalRel]).get

    val sinkSpec = new DynamicTableSinkSpec(
      tableIdentifier,
      catalogTable,
      util.Arrays.asList(abilitySpecs: _*))
    sinkSpec.setTableSink(tableSink)
    // Propagate the table configuration of the current planner context to the spec.
    sinkSpec.setReadableConfig(
      FlinkRelOptUtil.getTableConfigFromContext(this).getConfiguration)

    new StreamExecSink(
      sinkSpec,
      changelogMode,
      InputProperty.DEFAULT,
      FlinkTypeFactory.toLogicalRowType(getRowType),
      getRelDetailedDescription)
  }
}