
com.simiacryptus.openai.proxy.CompletionProxy.kt

package com.simiacryptus.openai.proxy

import com.simiacryptus.openai.OpenAIClient.CompletionRequest
import com.simiacryptus.openai.OpenAIClient
import com.simiacryptus.util.JsonUtil.toJson
import org.slf4j.event.Level

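/**
 * Proxy backend that serves interface calls through the OpenAI text-completion
 * endpoint: each call is rendered as a plain-text prompt containing the method
 * name, the interface's YAML description, and the JSON-serialized arguments,
 * and the model's completion is returned as the JSON response body.
 */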
class CompletionProxy<T : Any>(
    clazz: Class<out T>,
    apiKey: String,
    var model: OpenAIClient.Model = OpenAIClient.Models.DaVinci,
    var maxTokens: Int = 4000,
    temperature: Double = 0.7,
    var verbose: Boolean = false,
    private val moderated: Boolean = true,
    base: String = "https://api.openai.com/v1",
    val deserializerRetries: Int
) : GPTProxyBase<T>(clazz, temperature, true, deserializerRetries) {
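    // Low-level OpenAI HTTP client; its logging level is pinned to DEBUG.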
    val api: OpenAIClient

    init {
        api = OpenAIClient(apiKey, base, Level.DEBUG)
    }

    override fun complete(prompt: ProxyRequest, vararg examples: RequestResponse): String {
        if (verbose) log.info(prompt.toString())
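        // Render the call as a pseudo-JSON prompt: method name, the response
        // schema in YAML, the serialized arguments, and a trailing "{" that the
        // model is asked to complete into the response object.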
        val request = CompletionRequest()
        request.prompt = """
        |Method: ${prompt.methodName}
        |Response Type: 
        |    ${prompt.apiYaml.replace("\n", "\n            ")}
        |Request: 
        |    {
        |        ${
            prompt.argList.entries.joinToString(",\n", transform = { (argName, argValue) ->
                """"$argName": $argValue"""
            }).replace("\n", "\n                ")
        }
        |    }
        |Response:
        |    {""".trim().trimIndent()
        request.max_tokens = maxTokens
        request.temperature = temperature
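        // Optionally screen the serialized request via the moderation endpoint
        // before requesting the completion.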
        if (moderated) api.moderate(toJson(request))
        val completion = api.complete(request, model).firstChoice.get().toString()
        if (verbose) log.info(completion)
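        // The prompt ends with an opening "{", so prepend it to yield the full
        // JSON response body.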
        return "{$completion"
    }
}
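For context, here is a minimal usage sketch. The FactApi interface, the argument values, and the create() call on the base class are illustrative assumptions not shown in this file; only CompletionProxy's constructor parameters come from the source above.

// Hypothetical target interface: calls on the proxy are turned into completion
// prompts, and the model's JSON reply is deserialized into Fact.
interface FactApi {
    data class Fact(val statement: String = "", val source: String = "")
    fun randomFact(topic: String): Fact
}

fun main() {
    val proxy = CompletionProxy(
        clazz = FactApi::class.java,
        apiKey = System.getenv("OPENAI_API_KEY") ?: error("OPENAI_API_KEY not set"),
        verbose = true,
        deserializerRetries = 3
    )
    // create() is assumed to be provided by GPTProxyBase (not shown in this
    // file) and to return a dynamic proxy implementing FactApi.
    val facts: FactApi = proxy.create()
    println(facts.randomFact(topic = "astronomy"))
}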



