
com.pulumi.awsnative.appflow.kotlin.inputs.FlowSalesforceDestinationPropertiesArgs.kt

@file:Suppress("NAME_SHADOWING", "DEPRECATION")

package com.pulumi.awsnative.appflow.kotlin.inputs

import com.pulumi.awsnative.appflow.inputs.FlowSalesforceDestinationPropertiesArgs.builder
import com.pulumi.awsnative.appflow.kotlin.enums.FlowDataTransferApi
import com.pulumi.awsnative.appflow.kotlin.enums.FlowWriteOperationType
import com.pulumi.core.Output
import com.pulumi.core.Output.of
import com.pulumi.kotlin.ConvertibleToJava
import com.pulumi.kotlin.PulumiNullFieldException
import com.pulumi.kotlin.PulumiTagMarker
import com.pulumi.kotlin.applySuspend
import kotlin.String
import kotlin.Suppress
import kotlin.Unit
import kotlin.collections.List
import kotlin.jvm.JvmName

/**
 *
 * @property dataTransferApi Specifies which Salesforce API is used by Amazon AppFlow when your flow transfers data to Salesforce.
 * - **AUTOMATIC** - The default. Amazon AppFlow selects which API to use based on the number of records that your flow transfers to Salesforce. If your flow transfers fewer than 1,000 records, Amazon AppFlow uses Salesforce REST API. If your flow transfers 1,000 records or more, Amazon AppFlow uses Salesforce Bulk API 2.0.
 * Each of these Salesforce APIs structures data differently. If Amazon AppFlow selects the API automatically, be aware that, for recurring flows, the data output might vary from one flow run to the next. For example, if a flow runs daily, it might use REST API on one day to transfer 900 records, and it might use Bulk API 2.0 on the next day to transfer 1,100 records. For each of these flow runs, the respective Salesforce API formats the data differently. Some of the differences include how dates are formatted and null values are represented. Also, Bulk API 2.0 doesn't transfer Salesforce compound fields.
 * By choosing this option, you optimize flow performance for both small and large data transfers, but the tradeoff is inconsistent formatting in the output.
 * - **BULKV2** - Amazon AppFlow uses only Salesforce Bulk API 2.0. This API runs asynchronous data transfers, and it's optimal for large sets of data. By choosing this option, you ensure that your flow writes consistent output, but you optimize performance only for large data transfers.
 * Note that Bulk API 2.0 does not transfer Salesforce compound fields.
 * - **REST_SYNC** - Amazon AppFlow uses only Salesforce REST API. By choosing this option, you ensure that your flow writes consistent output, but you decrease performance for large data transfers that are better suited for Bulk API 2.0. In some cases, if your flow attempts to transfer a very large set of data, it might fail with a timeout error.
 * @property errorHandlingConfig The settings that determine how Amazon AppFlow handles an error when placing data in the Salesforce destination. For example, this setting determines whether the flow should fail after one insertion error, or continue and attempt to insert every record regardless of the initial failure. `ErrorHandlingConfig` is a part of the destination connector details.
 * @property idFieldNames List of fields used as ID when performing a write operation.
 * @property object The object specified in the Salesforce flow destination.
 * @property writeOperationType This specifies the type of write operation to be performed in Salesforce. When the value is `UPSERT`, then `idFieldNames` is required.
 */
public data class FlowSalesforceDestinationPropertiesArgs(
    public val dataTransferApi: Output<FlowDataTransferApi>? = null,
    public val errorHandlingConfig: Output<FlowErrorHandlingConfigArgs>? = null,
    public val idFieldNames: Output<List<String>>? = null,
    public val `object`: Output<String>,
    public val writeOperationType: Output<FlowWriteOperationType>? = null,
) : ConvertibleToJava<com.pulumi.awsnative.appflow.inputs.FlowSalesforceDestinationPropertiesArgs> {
    override fun toJava(): com.pulumi.awsnative.appflow.inputs.FlowSalesforceDestinationPropertiesArgs =
        com.pulumi.awsnative.appflow.inputs.FlowSalesforceDestinationPropertiesArgs.builder()
        .dataTransferApi(dataTransferApi?.applyValue({ args0 -> args0.let({ args0 -> args0.toJava() }) }))
        .errorHandlingConfig(
            errorHandlingConfig?.applyValue({ args0 ->
                args0.let({ args0 ->
                    args0.toJava()
                })
            }),
        )
        .idFieldNames(idFieldNames?.applyValue({ args0 -> args0.map({ args0 -> args0 }) }))
        .`object`(`object`.applyValue({ args0 -> args0 }))
        .writeOperationType(
            writeOperationType?.applyValue({ args0 ->
                args0.let({ args0 ->
                    args0.toJava()
                })
            }),
        ).build()
}
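
// Usage sketch (not part of the generated file): constructing the args
// directly with `Output.of`. The Salesforce object name "Account" and the
// record ID field "Id" are hypothetical example values, and the `Upsert`
// enum member name is an assumption based on the documented UPSERT value.
// Note that `idFieldNames` is required when the write operation is an upsert.
private val exampleUpsertProperties: FlowSalesforceDestinationPropertiesArgs =
    FlowSalesforceDestinationPropertiesArgs(
        `object` = of("Account"),
        writeOperationType = of(FlowWriteOperationType.Upsert),
        idFieldNames = of(listOf("Id")),
    )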

/**
 * Builder for [FlowSalesforceDestinationPropertiesArgs].
 */
@PulumiTagMarker
public class FlowSalesforceDestinationPropertiesArgsBuilder internal constructor() {
    private var dataTransferApi: Output<FlowDataTransferApi>? = null

    private var errorHandlingConfig: Output<FlowErrorHandlingConfigArgs>? = null

    private var idFieldNames: Output<List<String>>? = null

    private var `object`: Output<String>? = null

    private var writeOperationType: Output<FlowWriteOperationType>? = null

    /**
     * @param value Specifies which Salesforce API is used by Amazon AppFlow when your flow transfers data to Salesforce.
     * - **AUTOMATIC** - The default. Amazon AppFlow selects which API to use based on the number of records that your flow transfers to Salesforce. If your flow transfers fewer than 1,000 records, Amazon AppFlow uses Salesforce REST API. If your flow transfers 1,000 records or more, Amazon AppFlow uses Salesforce Bulk API 2.0.
     * Each of these Salesforce APIs structures data differently. If Amazon AppFlow selects the API automatically, be aware that, for recurring flows, the data output might vary from one flow run to the next. For example, if a flow runs daily, it might use REST API on one day to transfer 900 records, and it might use Bulk API 2.0 on the next day to transfer 1,100 records. For each of these flow runs, the respective Salesforce API formats the data differently. Some of the differences include how dates are formatted and null values are represented. Also, Bulk API 2.0 doesn't transfer Salesforce compound fields.
     * By choosing this option, you optimize flow performance for both small and large data transfers, but the tradeoff is inconsistent formatting in the output.
     * - **BULKV2** - Amazon AppFlow uses only Salesforce Bulk API 2.0. This API runs asynchronous data transfers, and it's optimal for large sets of data. By choosing this option, you ensure that your flow writes consistent output, but you optimize performance only for large data transfers.
     * Note that Bulk API 2.0 does not transfer Salesforce compound fields.
     * - **REST_SYNC** - Amazon AppFlow uses only Salesforce REST API. By choosing this option, you ensure that your flow writes consistent output, but you decrease performance for large data transfers that are better suited for Bulk API 2.0. In some cases, if your flow attempts to transfer a very large set of data, it might fail with a timeout error.
     */
    @JvmName("otucwtbfglqoxmwx")
    public suspend fun dataTransferApi(`value`: Output<FlowDataTransferApi>) {
        this.dataTransferApi = value
    }

    /**
     * @param value The settings that determine how Amazon AppFlow handles an error when placing data in the Salesforce destination. For example, this setting determines whether the flow should fail after one insertion error, or continue and attempt to insert every record regardless of the initial failure. `ErrorHandlingConfig` is a part of the destination connector details.
     */
    @JvmName("tvgdbhbvnfshipac")
    public suspend fun errorHandlingConfig(`value`: Output<FlowErrorHandlingConfigArgs>) {
        this.errorHandlingConfig = value
    }

    /**
     * @param value List of fields used as ID when performing a write operation.
     */
    @JvmName("kbcmphgftwjmtajp")
    public suspend fun idFieldNames(`value`: Output<List<String>>) {
        this.idFieldNames = value
    }

    @JvmName("pdxugblpqtxbksbs")
    public suspend fun idFieldNames(vararg values: Output) {
        this.idFieldNames = Output.all(values.asList())
    }

    /**
     * @param values List of fields used as ID when performing a write operation.
     */
    @JvmName("hjkxlvgnrkfmemkq")
    public suspend fun idFieldNames(values: List<Output<String>>) {
        this.idFieldNames = Output.all(values)
    }

    /**
     * @param value The object specified in the Salesforce flow destination.
     */
    @JvmName("aukuegvxtutwmvxw")
    public suspend fun `object`(`value`: Output<String>) {
        this.`object` = value
    }

    /**
     * @param value This specifies the type of write operation to be performed in Salesforce. When the value is `UPSERT`, then `idFieldNames` is required.
     */
    @JvmName("aesnphgtjtxmgfka")
    public suspend fun writeOperationType(`value`: Output<FlowWriteOperationType>) {
        this.writeOperationType = value
    }

    /**
     * @param value Specifies which Salesforce API is used by Amazon AppFlow when your flow transfers data to Salesforce.
     * - **AUTOMATIC** - The default. Amazon AppFlow selects which API to use based on the number of records that your flow transfers to Salesforce. If your flow transfers fewer than 1,000 records, Amazon AppFlow uses Salesforce REST API. If your flow transfers 1,000 records or more, Amazon AppFlow uses Salesforce Bulk API 2.0.
     * Each of these Salesforce APIs structures data differently. If Amazon AppFlow selects the API automatically, be aware that, for recurring flows, the data output might vary from one flow run to the next. For example, if a flow runs daily, it might use REST API on one day to transfer 900 records, and it might use Bulk API 2.0 on the next day to transfer 1,100 records. For each of these flow runs, the respective Salesforce API formats the data differently. Some of the differences include how dates are formatted and null values are represented. Also, Bulk API 2.0 doesn't transfer Salesforce compound fields.
     * By choosing this option, you optimize flow performance for both small and large data transfers, but the tradeoff is inconsistent formatting in the output.
     * - **BULKV2** - Amazon AppFlow uses only Salesforce Bulk API 2.0. This API runs asynchronous data transfers, and it's optimal for large sets of data. By choosing this option, you ensure that your flow writes consistent output, but you optimize performance only for large data transfers.
     * Note that Bulk API 2.0 does not transfer Salesforce compound fields.
     * - **REST_SYNC** - Amazon AppFlow uses only Salesforce REST API. By choosing this option, you ensure that your flow writes consistent output, but you decrease performance for large data transfers that are better suited for Bulk API 2.0. In some cases, if your flow attempts to transfer a very large set of data, it might fail with a timeout error.
     */
    @JvmName("xirnkdnsmmkwlovi")
    public suspend fun dataTransferApi(`value`: FlowDataTransferApi?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.dataTransferApi = mapped
    }

    /**
     * @param value The settings that determine how Amazon AppFlow handles an error when placing data in the Salesforce destination. For example, this setting determines whether the flow should fail after one insertion error, or continue and attempt to insert every record regardless of the initial failure. `ErrorHandlingConfig` is a part of the destination connector details.
     */
    @JvmName("fxoywjtwlgmplibd")
    public suspend fun errorHandlingConfig(`value`: FlowErrorHandlingConfigArgs?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.errorHandlingConfig = mapped
    }

    /**
     * @param argument The settings that determine how Amazon AppFlow handles an error when placing data in the Salesforce destination. For example, this setting determines whether the flow should fail after one insertion error, or continue and attempt to insert every record regardless of the initial failure. `ErrorHandlingConfig` is a part of the destination connector details.
     */
    @JvmName("llkjgxpehabvqfnj")
    public suspend fun errorHandlingConfig(argument: suspend FlowErrorHandlingConfigArgsBuilder.() -> Unit) {
        val toBeMapped = FlowErrorHandlingConfigArgsBuilder().applySuspend { argument() }.build()
        val mapped = of(toBeMapped)
        this.errorHandlingConfig = mapped
    }

    /**
     * @param value List of fields used as ID when performing a write operation.
     */
    @JvmName("vtvqrjfirthfgewx")
    public suspend fun idFieldNames(`value`: List<String>?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.idFieldNames = mapped
    }

    /**
     * @param values List of fields used as ID when performing a write operation.
     */
    @JvmName("ubgagudijbavleym")
    public suspend fun idFieldNames(vararg values: String) {
        val toBeMapped = values.toList()
        val mapped = toBeMapped.let({ args0 -> of(args0) })
        this.idFieldNames = mapped
    }

    /**
     * @param value The object specified in the Salesforce flow destination.
     */
    @JvmName("tuwfdmfdwtxonpky")
    public suspend fun `object`(`value`: String) {
        val toBeMapped = value
        val mapped = toBeMapped.let({ args0 -> of(args0) })
        this.`object` = mapped
    }

    /**
     * @param value This specifies the type of write operation to be performed in Salesforce. When the value is `UPSERT`, then `idFieldNames` is required.
     */
    @JvmName("jfbmytgbmreaytmp")
    public suspend fun writeOperationType(`value`: FlowWriteOperationType?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.writeOperationType = mapped
    }

    internal fun build(): FlowSalesforceDestinationPropertiesArgs =
        FlowSalesforceDestinationPropertiesArgs(
            dataTransferApi = dataTransferApi,
            errorHandlingConfig = errorHandlingConfig,
            idFieldNames = idFieldNames,
            `object` = `object` ?: throw PulumiNullFieldException("object"),
            writeOperationType = writeOperationType,
        )
}
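
// Usage sketch: driving the builder through `applySuspend`, the same pattern
// the generated code uses for nested builders. The builder constructor and
// `build()` are internal, so real call sites reach this type through generated
// DSL scopes; the enum member names `Bulkv2` and `Upsert` are assumptions
// based on the documented BULKV2 and UPSERT values.
internal suspend fun exampleBuilderUsage(): FlowSalesforceDestinationPropertiesArgs =
    FlowSalesforceDestinationPropertiesArgsBuilder().applySuspend {
        `object`("Account") // required: build() throws PulumiNullFieldException if unset
        dataTransferApi(FlowDataTransferApi.Bulkv2)
        writeOperationType(FlowWriteOperationType.Upsert)
        idFieldNames("Id")
    }.build()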
