com.pulumi.awsnative.bedrock.kotlin.outputs.FlowVersionPromptModelInferenceConfiguration.kt
@file:Suppress("NAME_SHADOWING", "DEPRECATION")
package com.pulumi.awsnative.bedrock.kotlin.outputs
import kotlin.Double
import kotlin.String
import kotlin.Suppress
import kotlin.collections.List
/**
* Prompt model inference configuration
* @property maxTokens Maximum length of output
* @property stopSequences List of stop sequences
* @property temperature Controls randomness, higher values increase diversity
* @property topK Sample from the k most likely next tokens
* @property topP Cumulative probability cutoff for token selection
*/
public data class FlowVersionPromptModelInferenceConfiguration(
    public val maxTokens: Double? = null,
    public val stopSequences: List<String>? = null,
    public val temperature: Double? = null,
    public val topK: Double? = null,
    public val topP: Double? = null,
) {
    public companion object {
        /**
         * Maps the generated Java SDK output type to this Kotlin data class,
         * converting each absent optional value to null.
         */
        public fun toKotlin(javaType: com.pulumi.awsnative.bedrock.outputs.FlowVersionPromptModelInferenceConfiguration): FlowVersionPromptModelInferenceConfiguration = FlowVersionPromptModelInferenceConfiguration(
            maxTokens = javaType.maxTokens().map({ args0 -> args0 }).orElse(null),
            stopSequences = javaType.stopSequences().map({ args0 -> args0 }),
            temperature = javaType.temperature().map({ args0 -> args0 }).orElse(null),
            topK = javaType.topK().map({ args0 -> args0 }).orElse(null),
            topP = javaType.topP().map({ args0 -> args0 }).orElse(null),
        )
    }
}
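For illustration, a minimal sketch of constructing this configuration directly in Kotlin. All fields are optional and default to null; the specific values used here (token limit, stop sequence, sampling parameters) are hypothetical and not taken from the Pulumi or AWS documentation.

import com.pulumi.awsnative.bedrock.kotlin.outputs.FlowVersionPromptModelInferenceConfiguration

fun main() {
    // Hypothetical inference settings for a prompt model.
    val config = FlowVersionPromptModelInferenceConfiguration(
        maxTokens = 512.0,                // maximum length of the generated output
        stopSequences = listOf("END"),    // generation halts when a stop sequence is produced
        temperature = 0.7,                // higher values increase randomness/diversity
        topK = 50.0,                      // sample from the 50 most likely next tokens
        topP = 0.9,                       // cumulative probability cutoff for token selection
    )
    println(config)
}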