// com.pulumi.awsnative.bedrock.kotlin.inputs.PromptModelInferenceConfigurationArgs.kt
// From the pulumi-aws-native-kotlin SDK: build cloud applications and infrastructure by
// combining the safety and reliability of infrastructure as code with the power of Kotlin.
@file:Suppress("NAME_SHADOWING", "DEPRECATION")
package com.pulumi.awsnative.bedrock.kotlin.inputs
import com.pulumi.awsnative.bedrock.inputs.PromptModelInferenceConfigurationArgs.builder
import com.pulumi.core.Output
import com.pulumi.core.Output.of
import com.pulumi.kotlin.ConvertibleToJava
import com.pulumi.kotlin.PulumiTagMarker
import kotlin.Double
import kotlin.String
import kotlin.Suppress
import kotlin.collections.List
import kotlin.jvm.JvmName
/**
* Prompt model inference configuration
* @property maxTokens Maximum length of output
* @property stopSequences List of stop sequences
* @property temperature Controls randomness, higher values increase diversity
* @property topK Sample from the k most likely next tokens
* @property topP Cumulative probability cutoff for token selection
*/
public data class PromptModelInferenceConfigurationArgs(
    public val maxTokens: Output<Double>? = null,
    public val stopSequences: Output<List<String>>? = null,
    public val temperature: Output<Double>? = null,
    public val topK: Output<Double>? = null,
    public val topP: Output<Double>? = null,
) : ConvertibleToJava<com.pulumi.awsnative.bedrock.inputs.PromptModelInferenceConfigurationArgs> {
    override fun toJava(): com.pulumi.awsnative.bedrock.inputs.PromptModelInferenceConfigurationArgs =
        com.pulumi.awsnative.bedrock.inputs.PromptModelInferenceConfigurationArgs.builder()
            .maxTokens(maxTokens?.applyValue({ args0 -> args0 }))
            .stopSequences(stopSequences?.applyValue({ args0 -> args0.map({ args0 -> args0 }) }))
            .temperature(temperature?.applyValue({ args0 -> args0 }))
            .topK(topK?.applyValue({ args0 -> args0 }))
            .topP(topP?.applyValue({ args0 -> args0 })).build()
}
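// Usage sketch (illustrative, not part of the generated source): constructing the args
// directly with named parameters. All fields are optional; the values below are hypothetical.
private fun examplePromptModelInferenceConfiguration(): PromptModelInferenceConfigurationArgs =
    PromptModelInferenceConfigurationArgs(
        maxTokens = Output.of(512.0),                       // cap on generated tokens
        stopSequences = Output.of(listOf("Human:", "###")), // generation halts at these markers
        temperature = Output.of(0.7),                       // moderate randomness
        topP = Output.of(0.9),                              // nucleus-sampling cutoff
    )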
/**
* Builder for [PromptModelInferenceConfigurationArgs].
*/
@PulumiTagMarker
public class PromptModelInferenceConfigurationArgsBuilder internal constructor() {
    private var maxTokens: Output<Double>? = null

    private var stopSequences: Output<List<String>>? = null

    private var temperature: Output<Double>? = null

    private var topK: Output<Double>? = null

    private var topP: Output<Double>? = null
    /**
     * @param value Maximum length of output
     */
    @JvmName("dlvtbjoliuapbptd")
    public suspend fun maxTokens(`value`: Output<Double>) {
        this.maxTokens = value
    }
    /**
     * @param value List of stop sequences
     */
    @JvmName("yqjlotouscqpnblc")
    public suspend fun stopSequences(`value`: Output<List<String>>) {
        this.stopSequences = value
    }

    /**
     * @param values List of stop sequences
     */
    @JvmName("jncmmlowcpairtcr")
    public suspend fun stopSequences(vararg values: Output<String>) {
        this.stopSequences = Output.all(values.asList())
    }
    /**
     * @param values List of stop sequences
     */
    @JvmName("wehtbdoergfvlhkm")
    public suspend fun stopSequences(values: List<String>) {
        this.stopSequences = Output.of(values)
    }

    /**
     * @param value Controls randomness, higher values increase diversity
     */
    public suspend fun temperature(`value`: Output<Double>) {
        this.temperature = value
    }

    /**
     * @param value Sample from the k most likely next tokens
     */
    public suspend fun topK(`value`: Output<Double>) {
        this.topK = value
    }

    /**
     * @param value Cumulative probability cutoff for token selection
     */
    public suspend fun topP(`value`: Output<Double>) {
        this.topP = value
    }

    internal fun build(): PromptModelInferenceConfigurationArgs = PromptModelInferenceConfigurationArgs(
        maxTokens = maxTokens,
        stopSequences = stopSequences,
        temperature = temperature,
        topK = topK,
        topP = topP,
    )
}
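// Usage sketch (illustrative, not part of the generated source): driving the builder
// directly from within the same module; in practice it is invoked through the Pulumi
// Kotlin DSL. The parameter values are hypothetical.
internal suspend fun examplePromptModelInferenceConfigurationViaBuilder(): PromptModelInferenceConfigurationArgs {
    val builder = PromptModelInferenceConfigurationArgsBuilder()
    builder.maxTokens(Output.of(512.0))     // cap on generated tokens
    builder.stopSequences(listOf("Human:")) // halt generation at this marker
    builder.temperature(Output.of(0.7))     // moderate randomness
    builder.topK(Output.of(250.0))          // sample among the 250 most likely tokens
    return builder.build()
}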