// commonMain.com.xebia.functional.openai.generated.model.RunObjectLastError.kt
// Artifact: xef-openai-client — Building applications with LLMs through composability in Kotlin
/**
*
* Please note:
* This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
* Do not edit this file manually.
*
*/
@file:Suppress(
"ArrayInDataClass",
"EnumEntryName",
"RemoveRedundantQualifierName",
"UnusedImport"
)
package com.xebia.functional.openai.generated.model
import kotlinx.serialization.Serializable
import kotlinx.serialization.SerialName
import kotlinx.serialization.Contextual
import kotlin.js.JsName
import kotlinx.serialization.json.*
/**
 * The last error associated with a run. Will be `null` when the run has no errors.
 *
 * @param code One of `server_error`, `rate_limit_exceeded`, or `invalid_prompt`.
 * @param message A human-readable description of the error.
 */
@Serializable
data class RunObjectLastError(
    /** One of `server_error`, `rate_limit_exceeded`, or `invalid_prompt`. */
    @SerialName(value = "code")
    val code: Code,

    /** A human-readable description of the error. */
    @SerialName(value = "message")
    val message: String,
) {
    /**
     * Error category reported by the API for this run.
     *
     * Values: server_error, rate_limit_exceeded, invalid_prompt
     */
    @Serializable
    enum class Code(val value: String) {
        @SerialName(value = "server_error")
        server_error("server_error"),

        @SerialName(value = "rate_limit_exceeded")
        rate_limit_exceeded("rate_limit_exceeded"),

        @SerialName(value = "invalid_prompt")
        invalid_prompt("invalid_prompt"),
    }
}