
org.apache.spark.sql.avro.functions.scala Maven / Gradle / Ivy
Go to download
Show more of this group Show more artifacts with this name
Show all versions of databricks-connect Show documentation
Develop locally and connect IDEs, notebook servers and running applications to Databricks clusters.
The newest version!
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.avro
import scala.jdk.CollectionConverters._
import org.apache.spark.annotation.Experimental
import org.apache.spark.sql.Column
import org.apache.spark.sql.functions.{fnWithOptions, lit}
// scalastyle:off: object.name
object functions {
  // scalastyle:on: object.name

  /**
   * Converts a binary column of avro format into its corresponding catalyst value. The specified
   * schema must match the read data, otherwise the behavior is undefined: it may fail or return
   * arbitrary result.
   *
   * @param data
   *   the binary column.
   * @param jsonFormatSchema
   *   the avro schema in JSON string format.
   *
   * @since 3.5.0
   */
  @Experimental
  def from_avro(data: Column, jsonFormatSchema: String): Column = {
    Column.fn("from_avro", data, lit(jsonFormatSchema))
  }

  /**
   * Converts a binary column of Avro format into its corresponding catalyst value. The specified
   * schema must match actual schema of the read data, otherwise the behavior is undefined: it may
   * fail or return arbitrary result. To deserialize the data with a compatible and evolved
   * schema, the expected Avro schema can be set via the option avroSchema.
   *
   * @param data
   *   the binary column.
   * @param jsonFormatSchema
   *   the avro schema in JSON string format.
   * @param options
   *   options to control how the Avro record is parsed.
   *
   * @since 3.5.0
   */
  @Experimental
  def from_avro(
      data: Column,
      jsonFormatSchema: String,
      options: java.util.Map[String, String]): Column = {
    fnWithOptions("from_avro", options.asScala.iterator, data, lit(jsonFormatSchema))
  }

  /**
   * Converts a column into binary of avro format.
   *
   * @param data
   *   the data column.
   *
   * @since 3.5.0
   */
  @Experimental
  def to_avro(data: Column): Column = {
    Column.fn("to_avro", data)
  }

  /**
   * Converts a column into binary of avro format.
   *
   * @param data
   *   the data column.
   * @param jsonFormatSchema
   *   user-specified output avro schema in JSON string format.
   *
   * @since 3.5.0
   */
  @Experimental
  def to_avro(data: Column, jsonFormatSchema: String): Column = {
    Column.fn("to_avro", data, lit(jsonFormatSchema))
  }

  // BEGIN-EDGE
  /**
   * Converts a binary column of Schema-Registry avro format into its corresponding catalyst
   * value. The schema of the given subject in Schema-Registry should not change in an
   * incompatible way, otherwise exception will be thrown at runtime when Spark consumes data with
   * new schema.
   *
   * @param data
   *   the binary column.
   * @param subject
   *   the subject in Schema-Registry that these data belong to.
   * @param schemaRegistryAddress
   *   the address(host and port) of Schema-Registry.
   */
  @Experimental
  def from_avro(data: Column, subject: String, schemaRegistryAddress: String): Column = {
    Column.fn("from_avro_schema_registry", data, lit(subject), lit(schemaRegistryAddress))
  }

  /**
   * Converts a binary column of Schema-Registry avro format into its corresponding catalyst
   * value. The schema of the given subject in Schema-Registry should not change in an
   * incompatible way, otherwise exception will be thrown at runtime when Spark consumes data with
   * new schema.
   *
   * @param data
   *   the binary column.
   * @param subject
   *   the subject in Schema-Registry that these data belong to.
   * @param schemaRegistryAddress
   *   the address(host and port) of Schema-Registry.
   * @param options
   *   options to control how the Avro record is parsed and configs for schema registry client.
   */
  @Experimental
  def from_avro(
      data: Column,
      subject: String,
      schemaRegistryAddress: String,
      options: java.util.Map[String, String]): Column = {
    fnWithOptions(
      "from_avro_schema_registry",
      options.asScala.iterator,
      data,
      lit(subject),
      lit(schemaRegistryAddress))
  }

  /**
   * Converts a column into binary of Schema-Registry avro format. The input data schema must have
   * been registered to the given subject in Schema-Registry, or the query will fail at runtime.
   *
   * @param data
   *   the data column.
   * @param subject
   *   the subject in Schema-Registry that these data belong to.
   * @param schemaRegistryAddress
   *   the address(host and port) of Schema-Registry.
   */
  @Experimental
  def to_avro(data: Column, subject: Column, schemaRegistryAddress: String): Column = {
    Column.fn("to_avro_schema_registry", data, subject, lit(schemaRegistryAddress))
  }

  /**
   * Converts a column into binary of Schema-Registry avro format. The input data schema must have
   * been registered to the given subject in Schema-Registry, or the query will fail at runtime.
   *
   * @param data
   *   the data column.
   * @param subject
   *   the subject in Schema-Registry that these data belong to.
   * @param schemaRegistryAddress
   *   the address(host and port) of Schema-Registry.
   * @param options
   *   options to control how the Avro record is serialized and configs for schema registry
   *   client.
   */
  @Experimental
  def to_avro(
      data: Column,
      subject: Column,
      schemaRegistryAddress: String,
      options: java.util.Map[String, String]): Column = {
    fnWithOptions(
      "to_avro_schema_registry",
      options.asScala.iterator,
      data,
      subject,
      lit(schemaRegistryAddress))
  }

  /**
   * Converts a column into binary of Schema-Registry avro format. The input data schema must have
   * been registered to the given subject in Schema-Registry, or the query will fail at runtime.
   *
   * @param data
   *   the data column.
   * @param subject
   *   the subject in Schema-Registry that these data belong to.
   * @param schemaRegistryAddress
   *   the address(host and port) of Schema-Registry.
   * @param jsonFormatSchema
   *   user-specified output avro schema in JSON string format.
   */
  @Experimental
  def to_avro(
      data: Column,
      subject: Column,
      schemaRegistryAddress: String,
      jsonFormatSchema: String): Column = {
    Column.fn(
      "to_avro_schema_registry",
      data,
      subject,
      lit(schemaRegistryAddress),
      lit(jsonFormatSchema))
  }

  /**
   * Converts a column into binary of Schema-Registry avro format. The input data schema must have
   * been registered to the given subject in Schema-Registry, or the query will fail at runtime.
   *
   * @param data
   *   the data column.
   * @param subject
   *   the subject in Schema-Registry that these data belong to.
   * @param schemaRegistryAddress
   *   the address(host and port) of Schema-Registry.
   * @param options
   *   options to control how the Avro record is serialized and configs for schema registry
   *   client.
   * @param jsonFormatSchema
   *   user-specified output avro schema in JSON string format.
   */
  @Experimental
  def to_avro(
      data: Column,
      subject: Column,
      schemaRegistryAddress: String,
      options: java.util.Map[String, String],
      jsonFormatSchema: String): Column = {
    fnWithOptions(
      "to_avro_schema_registry",
      options.asScala.iterator,
      data,
      subject,
      lit(schemaRegistryAddress),
      lit(jsonFormatSchema))
  }
  // END-EDGE
}
© 2015 - 2025 Weber Informatics LLC | Privacy Policy