commonMain.com.xebia.functional.openai.generated.model.MessageObjectIncompleteDetails.kt
Building applications with LLMs through composability in Kotlin
/**
 *
 * Please note:
 * This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
 * Do not edit this file manually.
 *
 */
@file:Suppress(
    "ArrayInDataClass",
    "EnumEntryName",
    "RemoveRedundantQualifierName",
    "UnusedImport"
)
package com.xebia.functional.openai.generated.model
import kotlinx.serialization.Serializable
import kotlinx.serialization.SerialName
import kotlinx.serialization.Contextual
import kotlin.js.JsName
import kotlinx.serialization.json.*
/**
 * On an incomplete message, details about why the message is incomplete.
 *
 * @param reason The reason the message is incomplete.
 */
@Serializable
data class MessageObjectIncompleteDetails (
    /* The reason the message is incomplete. */
    @SerialName(value = "reason") val reason: MessageObjectIncompleteDetails.Reason
) {
    /**
     * The reason the message is incomplete.
     *
     * Values: content_filter,max_tokens,run_cancelled,run_expired,run_failed
     */
    @Serializable
    enum class Reason(val value: kotlin.String) {
        @SerialName(value = "content_filter") content_filter("content_filter"),
        @SerialName(value = "max_tokens") max_tokens("max_tokens"),
        @SerialName(value = "run_cancelled") run_cancelled("run_cancelled"),
        @SerialName(value = "run_expired") run_expired("run_expired"),
        @SerialName(value = "run_failed") run_failed("run_failed");
    }
}
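
Because the model is annotated with @Serializable and both the field and the enum entries carry @SerialName values, it round-trips through kotlinx.serialization's Json without extra configuration. The following is a minimal usage sketch, not part of the generated file; the main function and the ignoreUnknownKeys setting are illustrative choices, and the only assumption is that kotlinx-serialization-json and this generated model are on the classpath.

// Illustrative usage sketch (not generated code): decode an incomplete_details
// payload and encode it back using kotlinx.serialization's Json.
import kotlinx.serialization.decodeFromString
import kotlinx.serialization.encodeToString
import kotlinx.serialization.json.Json
import com.xebia.functional.openai.generated.model.MessageObjectIncompleteDetails

fun main() {
    val json = Json { ignoreUnknownKeys = true }

    // Deserialize: the "reason" string maps onto the Reason enum via @SerialName.
    val details = json.decodeFromString<MessageObjectIncompleteDetails>(
        """{"reason":"max_tokens"}"""
    )
    println(details.reason)        // max_tokens
    println(details.reason.value)  // max_tokens

    // Serialize back to the wire format.
    println(json.encodeToString(details))  // {"reason":"max_tokens"}
}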