commonMain.com.xebia.functional.openai.generated.model.ChatCompletionRequestToolMessage.kt (from the xef-openai-client artifact)
Building applications with LLMs through composability in Kotlin
/**
 *
 * Please note:
 * This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
 * Do not edit this file manually.
 *
 */
@file:Suppress(
    "ArrayInDataClass",
    "EnumEntryName",
    "RemoveRedundantQualifierName",
    "UnusedImport"
)
package com.xebia.functional.openai.generated.model

import com.xebia.functional.openai.generated.model.ChatCompletionRequestToolMessageContent
import kotlinx.serialization.Serializable
import kotlinx.serialization.SerialName
import kotlinx.serialization.Contextual
import kotlin.js.JsName
import kotlinx.serialization.json.*
/**
 *
 *
 * @param role The role of the message's author, in this case `tool`.
 * @param content
 * @param toolCallId Tool call that this message is responding to.
 */
@Serializable
data class ChatCompletionRequestToolMessage(
    /* The role of the message's author, in this case `tool`. */
    @SerialName(value = "role") val role: ChatCompletionRequestToolMessage.Role,
    @SerialName(value = "content") val content: ChatCompletionRequestToolMessageContent,
    /* Tool call that this message is responding to. */
    @SerialName(value = "tool_call_id") val toolCallId: kotlin.String
) {
    /**
     * The role of the message's author, in this case `tool`.
     *
     * Values: tool
     */
    @Serializable
    enum class Role(val value: kotlin.String) {
        @SerialName(value = "tool") tool("tool");
    }
}