commonMain.com.xebia.functional.openai.generated.model.ChatCompletionStreamOptions.kt

/**
 *
 * Please note:
 * This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
 * Do not edit this file manually.
 *
 */

@file:Suppress(
    "ArrayInDataClass",
    "EnumEntryName",
    "RemoveRedundantQualifierName",
    "UnusedImport"
)

package com.xebia.functional.openai.generated.model

import kotlinx.serialization.Serializable
import kotlinx.serialization.SerialName
import kotlinx.serialization.Contextual
import kotlin.js.JsName
import kotlinx.serialization.json.*

/**
 * Options for streaming response. Only set this when you set `stream: true`.
 *
 * @param includeUsage If set, an additional chunk will be streamed before the `data: [DONE]` message. The `usage` field on this chunk shows the token usage statistics for the entire request, and the `choices` field will always be an empty array. All other chunks will also include a `usage` field, but with a null value.
 */
@Serializable
data class ChatCompletionStreamOptions(
    /* If set, an additional chunk will be streamed before the `data: [DONE]` message. The `usage` field on this chunk shows the token usage statistics for the entire request, and the `choices` field will always be an empty array. All other chunks will also include a `usage` field, but with a null value. */
    @SerialName(value = "include_usage") val includeUsage: kotlin.Boolean? = null
)
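For reference, a minimal usage sketch follows. It assumes only what the generated file itself declares: the kotlinx-serialization-json runtime and the `@SerialName("include_usage")` mapping shown above. The `main` function and the printed values are illustrative, not part of the generated API.

// Minimal sketch: round-trip the model through its JSON wire format using
// kotlinx.serialization, which the generated file already depends on.
import kotlinx.serialization.encodeToString
import kotlinx.serialization.decodeFromString
import kotlinx.serialization.json.Json
import com.xebia.functional.openai.generated.model.ChatCompletionStreamOptions

fun main() {
    // Ask the API to append a final usage chunk to the stream.
    val options = ChatCompletionStreamOptions(includeUsage = true)

    // @SerialName maps the Kotlin property to its snake_case wire name.
    val json = Json.encodeToString(options)
    println(json) // {"include_usage":true}

    // Decode the wire form back into the generated model.
    val decoded = Json.decodeFromString<ChatCompletionStreamOptions>("""{"include_usage":true}""")
    println(decoded.includeUsage) // true
}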
