xef-openai-client: commonMain.com.xebia.functional.openai.generated.api.Chat.kt
Building applications with LLMs through composability in Kotlin
/**
*
* Please note:
* This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
* Do not edit this file manually.
*
*/
@file:Suppress(
    "ArrayInDataClass",
    "EnumEntryName",
    "RemoveRedundantQualifierName",
    "UnusedImport"
)
package com.xebia.functional.openai.generated.api
import com.xebia.functional.openai.generated.model.CreateChatCompletionRequest
import com.xebia.functional.openai.generated.model.CreateChatCompletionResponse
import com.xebia.functional.openai.generated.model.CreateChatCompletionStreamResponse
import com.xebia.functional.openai.Config
import com.xebia.functional.openai.UploadFile
import com.xebia.functional.openai.appendGen
import com.xebia.functional.openai.generated.api.Chat.*
import com.xebia.functional.openai.streamEvents
import com.xebia.functional.openai.errors.serializeOrThrowWithResponseInfo
import io.ktor.client.HttpClient
import io.ktor.client.call.body
import io.ktor.client.plugins.timeout
import io.ktor.client.request.HttpRequestBuilder
import io.ktor.client.request.accept
import io.ktor.client.request.header
import io.ktor.client.request.forms.formData
import io.ktor.client.request.parameter
import io.ktor.client.request.prepareRequest
import io.ktor.client.request.request
import io.ktor.client.request.setBody
import io.ktor.client.statement.HttpResponse
import io.ktor.client.statement.HttpStatement
import io.ktor.http.ContentType
import io.ktor.http.HttpHeaders
import io.ktor.http.HttpMethod
import io.ktor.http.contentType
import io.ktor.http.path
import kotlinx.coroutines.flow.Flow
import kotlinx.coroutines.flow.flow
import kotlinx.serialization.*
import kotlinx.serialization.descriptors.*
import kotlinx.serialization.encoding.*
import kotlinx.serialization.json.Json
import kotlinx.serialization.json.JsonObject
import kotlinx.serialization.json.jsonObject
import kotlinx.serialization.json.JsonPrimitive
import kotlin.time.Duration.Companion.seconds
import kotlin.time.DurationUnit
/**
*
*/
interface Chat {
    /**
     * Creates a model response for the given chat conversation.
     *
     * @param createChatCompletionRequest
     * @param configure optional configuration for the request, allows overriding the default configuration.
     * @return CreateChatCompletionResponse
     */
    suspend fun createChatCompletion(createChatCompletionRequest: CreateChatCompletionRequest, configure: HttpRequestBuilder.() -> Unit = {}): CreateChatCompletionResponse

    /**
     * Streaming variant: creates a model response for the given chat conversation.
     * By default the request times out after 60 seconds; this can be overridden via [configure].
     *
     * @param createChatCompletionRequest
     * @param configure optional configuration for the request, allows overriding the default configuration.
     * @return [Flow]<[CreateChatCompletionStreamResponse]>
     */
    fun createChatCompletionStream(createChatCompletionRequest: CreateChatCompletionRequest, configure: HttpRequestBuilder.() -> Unit = {}): Flow<CreateChatCompletionStreamResponse>
}
fun Chat(client: HttpClient, config: Config): com.xebia.functional.openai.generated.api.Chat = object : com.xebia.functional.openai.generated.api.Chat {
    override suspend fun createChatCompletion(createChatCompletionRequest: CreateChatCompletionRequest, configure: HttpRequestBuilder.() -> Unit): CreateChatCompletionResponse =
        client.request {
            configure()
            method = HttpMethod.Post
            contentType(ContentType.Application.Json)
            url { path("chat/completions") }
            setBody(createChatCompletionRequest)
        }.serializeOrThrowWithResponseInfo()

    override fun createChatCompletionStream(createChatCompletionRequest: CreateChatCompletionRequest, configure: HttpRequestBuilder.() -> Unit): Flow<CreateChatCompletionStreamResponse> = flow {
        client.prepareRequest {
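            // Streaming defaults: 60 second request and socket timeouts; callers can override them in [configure].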
            timeout {
                requestTimeoutMillis = 60.seconds.toLong(DurationUnit.MILLISECONDS)
                socketTimeoutMillis = 60.seconds.toLong(DurationUnit.MILLISECONDS)
            }
            configure()
            method = HttpMethod.Post
            accept(ContentType.Text.EventStream)
            header(HttpHeaders.CacheControl, "no-cache")
            header(HttpHeaders.Connection, "keep-alive")
            contentType(ContentType.Application.Json)
            url { path("chat/completions") }
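            // Re-encode the request body with "stream": true so the server responds with server-sent events.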
            val element = Json.encodeToJsonElement(CreateChatCompletionRequest.serializer(), createChatCompletionRequest)
            val jsObject = JsonObject(element.jsonObject + Pair("stream", JsonPrimitive(true)))
            setBody(jsObject)
        }.execute { streamEvents(it, config.json, config.streamingPrefix, config.streamingDelimiter) }
    }
}
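
For orientation, a minimal usage sketch of this generated API. It is not part of the generated file: the `demo` function is hypothetical, and the way `client`, `config`, and `request` are obtained is assumed; in practice the Ktor `HttpClient` must already carry the OpenAI base URL and Authorization header, and `Config` comes from xef-openai-client's setup code. The sketch relies on the same imports as the file above.

// Sketch only: assumes a pre-configured Ktor HttpClient (base URL + Authorization header)
// and a Config from xef-openai-client; neither is constructed here.
suspend fun demo(client: HttpClient, config: Config, request: CreateChatCompletionRequest) {
    val chat: Chat = Chat(client, config)

    // Non-streaming call: suspends until the whole response has arrived.
    val completion: CreateChatCompletionResponse = chat.createChatCompletion(request)
    println(completion)

    // Streaming call: collecting the Flow yields incremental chunks as server-sent events arrive.
    chat.createChatCompletionStream(request).collect { chunk: CreateChatCompletionStreamResponse ->
        println(chunk)
    }
}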