com.azure.cosmos.spark.udf.CreateChangeFeedOffsetFromSpark2.scala (artifact: azure-cosmos-spark_3-5_2-12)
OLTP Spark 3.5 Connector for Azure Cosmos DB SQL API
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
package com.azure.cosmos.spark.udf

import com.azure.cosmos.implementation.SparkBridgeImplementationInternal
import com.azure.cosmos.spark.{CosmosClientCache, CosmosClientCacheItem, CosmosClientConfiguration, CosmosConfig, CosmosReadConfig, Loan}
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.api.java.UDF4

// UDF that converts change feed continuation tokens produced by the Spark 2 Cosmos DB
// connector into the change feed offset format used by this Spark 3 connector.
@SerialVersionUID(1L)
class CreateChangeFeedOffsetFromSpark2 extends UDF4[String, String, Map[String, String], Map[Int, Long], String] {
  override def call
  (
    databaseResourceId: String,
    containerResourceId: String,
    userProvidedConfig: Map[String, String],
    tokens: Map[Int, Long]
  ): String = {

    // Resolve the effective connector configuration from the user-provided settings
    // and derive the read and client configurations from it.
    val effectiveUserConfig = CosmosConfig.getEffectiveConfig(None, None, userProvidedConfig)
    val readConfig = CosmosReadConfig.parseCosmosReadConfig(effectiveUserConfig)
    val cosmosClientConfig = CosmosClientConfiguration(
      effectiveUserConfig,
      useEventualConsistency = readConfig.forceEventualConsistency,
      CosmosClientConfiguration.getSparkEnvironmentInfo(SparkSession.getActiveSession))

    // Borrow a cached Cosmos client (released when the Loan block completes) and delegate
    // the token conversion to the internal Spark bridge implementation.
    Loan(
      List[Option[CosmosClientCacheItem]](
        Some(CosmosClientCache(
          cosmosClientConfig,
          None,
          s"UDF CreateChangeFeedOffsetFromSpark2"
        ))
      ))
      .to(cosmosClientCacheItems => {
        SparkBridgeImplementationInternal.createChangeFeedOffsetFromSpark2(
          cosmosClientCacheItems(0).get.cosmosClient,
          databaseResourceId,
          containerResourceId,
          tokens
        )
      })
  }
}
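This class is typically invoked when migrating an existing Spark 2 Cosmos DB change feed checkpoint to the Spark 3 connector. The sketch below is not part of the original file: it assumes a running Spark session with access to the Cosmos DB account, every endpoint, key, resource ID, and token value is a placeholder, and the tokens map is assumed to be keyed by partition key range id as recorded by the Spark 2 connector checkpoint.

import com.azure.cosmos.spark.udf.CreateChangeFeedOffsetFromSpark2

// Connection settings passed as the userProvidedConfig map (placeholder values).
val cosmosConfig = Map(
  "spark.cosmos.accountEndpoint" -> "https://<account>.documents.azure.com:443/",
  "spark.cosmos.accountKey"      -> "<account-key>"
)

// Continuation tokens captured from the Spark 2 connector checkpoint,
// assumed to be keyed by partition key range id (placeholder values).
val spark2Tokens: Map[Int, Long] = Map(0 -> 12345L, 1 -> 67890L)

// Returns the serialized Spark 3 change feed offset as a String.
val offsetJson: String = new CreateChangeFeedOffsetFromSpark2().call(
  "<databaseResourceId>",
  "<containerResourceId>",
  cosmosConfig,
  spark2Tokens
)

The returned offset string is intended to seed the checkpoint of a Spark 3 structured streaming query so that it resumes reading the container's change feed from the position recorded by the Spark 2 connector.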