commonMain.aws.sdk.kotlin.services.bedrockruntime.DefaultBedrockRuntimeClient.kt

// Code generated by smithy-kotlin-codegen. DO NOT EDIT!

package aws.sdk.kotlin.services.bedrockruntime

import aws.sdk.kotlin.runtime.http.ApiMetadata
import aws.sdk.kotlin.runtime.http.AwsUserAgentMetadata
import aws.sdk.kotlin.runtime.http.interceptors.AwsSpanInterceptor
import aws.sdk.kotlin.runtime.http.interceptors.BusinessMetricsInterceptor
import aws.sdk.kotlin.runtime.http.middleware.AwsRetryHeaderMiddleware
import aws.sdk.kotlin.runtime.http.middleware.RecursionDetection
import aws.sdk.kotlin.runtime.http.middleware.UserAgent
import aws.sdk.kotlin.services.bedrockruntime.auth.BedrockRuntimeAuthSchemeProviderAdapter
import aws.sdk.kotlin.services.bedrockruntime.auth.BedrockRuntimeIdentityProviderConfigAdapter
import aws.sdk.kotlin.services.bedrockruntime.endpoints.internal.EndpointResolverAdapter
import aws.sdk.kotlin.services.bedrockruntime.model.*
import aws.sdk.kotlin.services.bedrockruntime.serde.*
import aws.smithy.kotlin.runtime.auth.AuthSchemeId
import aws.smithy.kotlin.runtime.auth.awssigning.AwsSigningAttributes
import aws.smithy.kotlin.runtime.auth.awssigning.DefaultAwsSigner
import aws.smithy.kotlin.runtime.awsprotocol.AwsAttributes
import aws.smithy.kotlin.runtime.client.SdkClientOption
import aws.smithy.kotlin.runtime.collections.attributesOf
import aws.smithy.kotlin.runtime.collections.putIfAbsent
import aws.smithy.kotlin.runtime.collections.putIfAbsentNotNull
import aws.smithy.kotlin.runtime.http.SdkHttpClient
import aws.smithy.kotlin.runtime.http.auth.AuthScheme
import aws.smithy.kotlin.runtime.http.auth.SigV4AuthScheme
import aws.smithy.kotlin.runtime.http.operation.OperationAuthConfig
import aws.smithy.kotlin.runtime.http.operation.OperationMetrics
import aws.smithy.kotlin.runtime.http.operation.SdkHttpOperation
import aws.smithy.kotlin.runtime.http.operation.context
import aws.smithy.kotlin.runtime.http.operation.execute
import aws.smithy.kotlin.runtime.http.operation.roundTrip
import aws.smithy.kotlin.runtime.http.operation.telemetry
import aws.smithy.kotlin.runtime.io.SdkManagedGroup
import aws.smithy.kotlin.runtime.io.addIfManaged
import aws.smithy.kotlin.runtime.operation.ExecutionContext

internal class DefaultBedrockRuntimeClient(override val config: BedrockRuntimeClient.Config) : BedrockRuntimeClient {
    private val managedResources = SdkManagedGroup()
    private val client = SdkHttpClient(config.httpClient)
    private val identityProviderConfig = BedrockRuntimeIdentityProviderConfigAdapter(config)
    private val configuredAuthSchemes = with(config.authSchemes.associateBy(AuthScheme::schemeId).toMutableMap()){
        getOrPut(AuthSchemeId.AwsSigV4){
            SigV4AuthScheme(DefaultAwsSigner, "bedrock")
        }
        toMap()
    }
    private val authSchemeAdapter = BedrockRuntimeAuthSchemeProviderAdapter(config)
    private val telemetryScope = "aws.sdk.kotlin.services.bedrockruntime"
    private val opMetrics = OperationMetrics(telemetryScope, config.telemetryProvider)

    init {
        managedResources.addIfManaged(config.httpClient)
        managedResources.addIfManaged(config.credentialsProvider)
    }

    private val awsUserAgentMetadata = AwsUserAgentMetadata.fromEnvironment(ApiMetadata(ServiceId, SdkVersion), config.applicationId)

    /**
     * Applies a guardrail to content that you provide.
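     *
     * A rough usage sketch (not part of the generated source; the region, guardrail identifier,
     * version, and text below are placeholder assumptions):
     *
     * ```
     * import aws.sdk.kotlin.services.bedrockruntime.BedrockRuntimeClient
     * import aws.sdk.kotlin.services.bedrockruntime.model.*
     *
     * suspend fun main() {
     *     val client = BedrockRuntimeClient { region = "us-east-1" } // placeholder region
     *     val response = client.applyGuardrail(
     *         ApplyGuardrailRequest {
     *             guardrailIdentifier = "arn:aws:bedrock:us-east-1:111122223333:guardrail/example" // placeholder
     *             guardrailVersion = "DRAFT" // placeholder version
     *             source = GuardrailContentSource.Input
     *             content = listOf(GuardrailContentBlock.Text(GuardrailTextBlock { text = "Text to assess" }))
     *         }
     *     )
     *     println(response.action) // whether the guardrail intervened
     *     client.close()
     * }
     * ```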
     */
    override suspend fun applyGuardrail(input: ApplyGuardrailRequest): ApplyGuardrailResponse {
        val op = SdkHttpOperation.build {
            serializeWith = ApplyGuardrailOperationSerializer()
            deserializeWith = ApplyGuardrailOperationDeserializer()
            operationName = "ApplyGuardrail"
            serviceName = ServiceId
            telemetry {
                provider = config.telemetryProvider
                scope = telemetryScope
                metrics = opMetrics
                attributes = attributesOf {
                    "rpc.system" to "aws-api"
                }
            }
            execution.auth = OperationAuthConfig(authSchemeAdapter, configuredAuthSchemes, identityProviderConfig)
            execution.endpointResolver = EndpointResolverAdapter(config)
            execution.retryStrategy = config.retryStrategy
            execution.retryPolicy = config.retryPolicy
        }
        mergeServiceDefaults(op.context)
        op.install(AwsRetryHeaderMiddleware())
        op.interceptors.add(AwsSpanInterceptor)
        op.interceptors.add(BusinessMetricsInterceptor())
        op.install(UserAgent(awsUserAgentMetadata))
        op.install(RecursionDetection())
        op.interceptors.addAll(config.interceptors)
        return op.roundTrip(client, input)
    }

    /**
     * Sends messages to the specified Amazon Bedrock model. `Converse` provides a consistent interface that works with all models that support messages. This allows you to write code once and use it with different models. If a model has unique inference parameters, you can also pass those unique parameters to the model.
     *
     * Amazon Bedrock doesn't store any text, images, or documents that you provide as content. The data is only used to generate the response.
     *
     * You can submit a prompt by including it in the `messages` field, specifying the `modelId` of a foundation model or inference profile to run inference on it, and including any other fields that are relevant to your use case.
     *
     * You can also submit a prompt from Prompt management by specifying the ARN of the prompt version and including a map of variables to values in the `promptVariables` field. You can append more messages to the prompt by using the `messages` field. If you use a prompt from Prompt management, you can't include the following fields in the request: `additionalModelRequestFields`, `inferenceConfig`, `system`, or `toolConfig`. Instead, these fields must be defined through Prompt management. For more information, see [Use a prompt from Prompt management](https://docs.aws.amazon.com/bedrock/latest/userguide/prompt-management-use.html).
     *
     * For information about the Converse API, see *Use the Converse API* in the *Amazon Bedrock User Guide*. To use a guardrail, see *Use a guardrail with the Converse API* in the *Amazon Bedrock User Guide*. To use a tool with a model, see *Tool use (Function calling)* in the *Amazon Bedrock User Guide*.
     *
     * For example code, see *Converse API examples* in the *Amazon Bedrock User Guide*.
     *
     * This operation requires permission for the `bedrock:InvokeModel` action.
     *
     * To deny all inference access to resources that you specify in the modelId field, you need to deny access to the `bedrock:InvokeModel` and `bedrock:InvokeModelWithResponseStream` actions. Doing this also denies access to the resource through the base inference actions ([InvokeModel](https://docs.aws.amazon.com/bedrock/latest/APIReference/API_runtime_InvokeModel.html) and [InvokeModelWithResponseStream](https://docs.aws.amazon.com/bedrock/latest/APIReference/API_runtime_InvokeModelWithResponseStream.html)). For more information, see [Deny access for inference on specific models](https://docs.aws.amazon.com/bedrock/latest/userguide/security_iam_id-based-policy-examples.html#security_iam_id-based-policy-examples-deny-inference).
     *
     * For troubleshooting some of the common errors you might encounter when using the `Converse` API, see [Troubleshooting Amazon Bedrock API Error Codes](https://docs.aws.amazon.com/bedrock/latest/userguide/troubleshooting-api-error-codes.html) in the *Amazon Bedrock User Guide*.
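     *
     * A minimal usage sketch (not part of the generated source; the region, model ID, and prompt
     * below are placeholder assumptions):
     *
     * ```
     * import aws.sdk.kotlin.services.bedrockruntime.BedrockRuntimeClient
     * import aws.sdk.kotlin.services.bedrockruntime.model.*
     *
     * suspend fun main() {
     *     val client = BedrockRuntimeClient { region = "us-east-1" } // placeholder region
     *     val response = client.converse(
     *         ConverseRequest {
     *             modelId = "anthropic.claude-3-haiku-20240307-v1:0" // placeholder model ID
     *             messages = listOf(
     *                 Message {
     *                     role = ConversationRole.User
     *                     content = listOf(ContentBlock.Text("Hello, world"))
     *                 }
     *             )
     *         }
     *     )
     *     println(response.output) // ConverseOutput union; the Message member holds the reply
     *     client.close()
     * }
     * ```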
     */
    override suspend fun converse(input: ConverseRequest): ConverseResponse {
        val op = SdkHttpOperation.build {
            serializeWith = ConverseOperationSerializer()
            deserializeWith = ConverseOperationDeserializer()
            operationName = "Converse"
            serviceName = ServiceId
            telemetry {
                provider = config.telemetryProvider
                scope = telemetryScope
                metrics = opMetrics
                attributes = attributesOf {
                    "rpc.system" to "aws-api"
                }
            }
            execution.auth = OperationAuthConfig(authSchemeAdapter, configuredAuthSchemes, identityProviderConfig)
            execution.endpointResolver = EndpointResolverAdapter(config)
            execution.retryStrategy = config.retryStrategy
            execution.retryPolicy = config.retryPolicy
        }
        mergeServiceDefaults(op.context)
        op.install(AwsRetryHeaderMiddleware())
        op.interceptors.add(AwsSpanInterceptor)
        op.interceptors.add(BusinessMetricsInterceptor())
        op.install(UserAgent(awsUserAgentMetadata))
        op.install(RecursionDetection())
        op.interceptors.addAll(config.interceptors)
        return op.roundTrip(client, input)
    }

    /**
     * Sends messages to the specified Amazon Bedrock model and returns the response in a stream. `ConverseStream` provides a consistent API that works with all Amazon Bedrock models that support messages. This allows you to write code once and use it with different models. Should a model have unique inference parameters, you can also pass those unique parameters to the model.
     *
     * To find out if a model supports streaming, call [GetFoundationModel](https://docs.aws.amazon.com/bedrock/latest/APIReference/API_GetFoundationModel.html) and check the `responseStreamingSupported` field in the response.
     *
     * The CLI doesn't support streaming operations in Amazon Bedrock, including `ConverseStream`.
     *
     * Amazon Bedrock doesn't store any text, images, or documents that you provide as content. The data is only used to generate the response.
     *
     * You can submit a prompt by including it in the `messages` field, specifying the `modelId` of a foundation model or inference profile to run inference on it, and including any other fields that are relevant to your use case.
     *
     * You can also submit a prompt from Prompt management by specifying the ARN of the prompt version and including a map of variables to values in the `promptVariables` field. You can append more messages to the prompt by using the `messages` field. If you use a prompt from Prompt management, you can't include the following fields in the request: `additionalModelRequestFields`, `inferenceConfig`, `system`, or `toolConfig`. Instead, these fields must be defined through Prompt management. For more information, see [Use a prompt from Prompt management](https://docs.aws.amazon.com/bedrock/latest/userguide/prompt-management-use.html).
     *
     * For information about the Converse API, see *Use the Converse API* in the *Amazon Bedrock User Guide*. To use a guardrail, see *Use a guardrail with the Converse API* in the *Amazon Bedrock User Guide*. To use a tool with a model, see *Tool use (Function calling)* in the *Amazon Bedrock User Guide*.
     *
     * For example code, see *Conversation streaming example* in the *Amazon Bedrock User Guide*.
     *
     * This operation requires permission for the `bedrock:InvokeModelWithResponseStream` action.
     *
     * To deny all inference access to resources that you specify in the modelId field, you need to deny access to the `bedrock:InvokeModel` and `bedrock:InvokeModelWithResponseStream` actions. Doing this also denies access to the resource through the base inference actions ([InvokeModel](https://docs.aws.amazon.com/bedrock/latest/APIReference/API_runtime_InvokeModel.html) and [InvokeModelWithResponseStream](https://docs.aws.amazon.com/bedrock/latest/APIReference/API_runtime_InvokeModelWithResponseStream.html)). For more information, see [Deny access for inference on specific models](https://docs.aws.amazon.com/bedrock/latest/userguide/security_iam_id-based-policy-examples.html#security_iam_id-based-policy-examples-deny-inference).
     *
     * For troubleshooting some of the common errors you might encounter when using the `ConverseStream` API, see [Troubleshooting Amazon Bedrock API Error Codes](https://docs.aws.amazon.com/bedrock/latest/userguide/troubleshooting-api-error-codes.html) in the *Amazon Bedrock User Guide*.
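     *
     * A minimal streaming sketch (not part of the generated source; the region, model ID, and prompt
     * below are placeholder assumptions). The streaming response is only valid inside the lambda
     * passed to the call, so the event stream is consumed there:
     *
     * ```
     * import aws.sdk.kotlin.services.bedrockruntime.BedrockRuntimeClient
     * import aws.sdk.kotlin.services.bedrockruntime.model.*
     * import kotlinx.coroutines.flow.collect
     *
     * suspend fun main() {
     *     val client = BedrockRuntimeClient { region = "us-east-1" } // placeholder region
     *     val request = ConverseStreamRequest {
     *         modelId = "anthropic.claude-3-haiku-20240307-v1:0" // placeholder model ID
     *         messages = listOf(
     *             Message {
     *                 role = ConversationRole.User
     *                 content = listOf(ContentBlock.Text("Tell me a short story"))
     *             }
     *         )
     *     }
     *     client.converseStream(request) { response ->
     *         // Each emitted element is a ConverseStreamOutput union member (content deltas, metadata, ...)
     *         response.stream?.collect { event -> println(event) }
     *     }
     *     client.close()
     * }
     * ```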
     */
    override suspend fun <T> converseStream(input: ConverseStreamRequest, block: suspend (ConverseStreamResponse) -> T): T {
        val op = SdkHttpOperation.build {
            serializeWith = ConverseStreamOperationSerializer()
            deserializeWith = ConverseStreamOperationDeserializer()
            operationName = "ConverseStream"
            serviceName = ServiceId
            telemetry {
                provider = config.telemetryProvider
                scope = telemetryScope
                metrics = opMetrics
                attributes = attributesOf {
                    "rpc.system" to "aws-api"
                }
            }
            execution.auth = OperationAuthConfig(authSchemeAdapter, configuredAuthSchemes, identityProviderConfig)
            execution.endpointResolver = EndpointResolverAdapter(config)
            execution.retryStrategy = config.retryStrategy
            execution.retryPolicy = config.retryPolicy
        }
        mergeServiceDefaults(op.context)
        op.install(AwsRetryHeaderMiddleware())
        op.interceptors.add(AwsSpanInterceptor)
        op.interceptors.add(BusinessMetricsInterceptor())
        op.install(UserAgent(awsUserAgentMetadata))
        op.install(RecursionDetection())
        op.interceptors.addAll(config.interceptors)
        return op.execute(client, input, block)
    }

    /**
     * Invokes the specified Amazon Bedrock model to run inference using the prompt and inference parameters provided in the request body. You use model inference to generate text, images, and embeddings.
     *
     * For example code, see *Invoke model code examples* in the *Amazon Bedrock User Guide*.
     *
     * This operation requires permission for the `bedrock:InvokeModel` action.
     *
     * To deny all inference access to resources that you specify in the modelId field, you need to deny access to the `bedrock:InvokeModel` and `bedrock:InvokeModelWithResponseStream` actions. Doing this also denies access to the resource through the Converse API actions ([Converse](https://docs.aws.amazon.com/bedrock/latest/APIReference/API_runtime_Converse.html) and [ConverseStream](https://docs.aws.amazon.com/bedrock/latest/APIReference/API_runtime_ConverseStream.html)). For more information, see [Deny access for inference on specific models](https://docs.aws.amazon.com/bedrock/latest/userguide/security_iam_id-based-policy-examples.html#security_iam_id-based-policy-examples-deny-inference).
     *
     * For troubleshooting some of the common errors you might encounter when using the `InvokeModel` API, see [Troubleshooting Amazon Bedrock API Error Codes](https://docs.aws.amazon.com/bedrock/latest/userguide/troubleshooting-api-error-codes.html) in the *Amazon Bedrock User Guide*.
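     *
     * A minimal usage sketch (not part of the generated source; the region, model ID, and JSON payload
     * below are placeholder assumptions, and the payload format is model-specific):
     *
     * ```
     * import aws.sdk.kotlin.services.bedrockruntime.BedrockRuntimeClient
     * import aws.sdk.kotlin.services.bedrockruntime.model.InvokeModelRequest
     *
     * suspend fun main() {
     *     val client = BedrockRuntimeClient { region = "us-east-1" } // placeholder region
     *     val response = client.invokeModel(
     *         InvokeModelRequest {
     *             modelId = "amazon.titan-text-express-v1" // placeholder model ID
     *             contentType = "application/json"
     *             accept = "application/json"
     *             body = """{"inputText":"Hello, world"}""".encodeToByteArray() // model-specific payload
     *         }
     *     )
     *     println(response.body?.decodeToString()) // raw, model-specific JSON response
     *     client.close()
     * }
     * ```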
     */
    override suspend fun invokeModel(input: InvokeModelRequest): InvokeModelResponse {
        val op = SdkHttpOperation.build {
            serializeWith = InvokeModelOperationSerializer()
            deserializeWith = InvokeModelOperationDeserializer()
            operationName = "InvokeModel"
            serviceName = ServiceId
            telemetry {
                provider = config.telemetryProvider
                scope = telemetryScope
                metrics = opMetrics
                attributes = attributesOf {
                    "rpc.system" to "aws-api"
                }
            }
            execution.auth = OperationAuthConfig(authSchemeAdapter, configuredAuthSchemes, identityProviderConfig)
            execution.endpointResolver = EndpointResolverAdapter(config)
            execution.retryStrategy = config.retryStrategy
            execution.retryPolicy = config.retryPolicy
        }
        mergeServiceDefaults(op.context)
        op.install(AwsRetryHeaderMiddleware())
        op.interceptors.add(AwsSpanInterceptor)
        op.interceptors.add(BusinessMetricsInterceptor())
        op.install(UserAgent(awsUserAgentMetadata))
        op.install(RecursionDetection())
        op.interceptors.addAll(config.interceptors)
        return op.roundTrip(client, input)
    }

    /**
     * Invokes the specified Amazon Bedrock model to run inference using the prompt and inference parameters provided in the request body. The response is returned in a stream.
     *
     * To see if a model supports streaming, call [GetFoundationModel](https://docs.aws.amazon.com/bedrock/latest/APIReference/API_GetFoundationModel.html) and check the `responseStreamingSupported` field in the response.
     *
     * The CLI doesn't support streaming operations in Amazon Bedrock, including `InvokeModelWithResponseStream`.
     *
     * For example code, see *Invoke model with streaming code example* in the *Amazon Bedrock User Guide*.
     *
     * This operation requires permissions to perform the `bedrock:InvokeModelWithResponseStream` action.
     *
     * To deny all inference access to resources that you specify in the modelId field, you need to deny access to the `bedrock:InvokeModel` and `bedrock:InvokeModelWithResponseStream` actions. Doing this also denies access to the resource through the Converse API actions ([Converse](https://docs.aws.amazon.com/bedrock/latest/APIReference/API_runtime_Converse.html) and [ConverseStream](https://docs.aws.amazon.com/bedrock/latest/APIReference/API_runtime_ConverseStream.html)). For more information, see [Deny access for inference on specific models](https://docs.aws.amazon.com/bedrock/latest/userguide/security_iam_id-based-policy-examples.html#security_iam_id-based-policy-examples-deny-inference).
     *
     * For troubleshooting some of the common errors you might encounter when using the `InvokeModelWithResponseStream` API, see [Troubleshooting Amazon Bedrock API Error Codes](https://docs.aws.amazon.com/bedrock/latest/userguide/troubleshooting-api-error-codes.html) in the *Amazon Bedrock User Guide*.
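     *
     * A minimal streaming sketch (not part of the generated source; the region, model ID, and JSON payload
     * below are placeholder assumptions). The streaming response is only valid inside the lambda
     * passed to the call, so the event stream is consumed there:
     *
     * ```
     * import aws.sdk.kotlin.services.bedrockruntime.BedrockRuntimeClient
     * import aws.sdk.kotlin.services.bedrockruntime.model.InvokeModelWithResponseStreamRequest
     * import kotlinx.coroutines.flow.collect
     *
     * suspend fun main() {
     *     val client = BedrockRuntimeClient { region = "us-east-1" } // placeholder region
     *     val request = InvokeModelWithResponseStreamRequest {
     *         modelId = "amazon.titan-text-express-v1" // placeholder model ID
     *         contentType = "application/json"
     *         body = """{"inputText":"Hello, world"}""".encodeToByteArray() // model-specific payload
     *     }
     *     client.invokeModelWithResponseStream(request) { response ->
     *         // Each emitted element is a ResponseStream union member; Chunk events carry payload bytes
     *         response.body?.collect { event -> println(event) }
     *     }
     *     client.close()
     * }
     * ```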
     */
    override suspend fun <T> invokeModelWithResponseStream(input: InvokeModelWithResponseStreamRequest, block: suspend (InvokeModelWithResponseStreamResponse) -> T): T {
        val op = SdkHttpOperation.build {
            serializeWith = InvokeModelWithResponseStreamOperationSerializer()
            deserializeWith = InvokeModelWithResponseStreamOperationDeserializer()
            operationName = "InvokeModelWithResponseStream"
            serviceName = ServiceId
            telemetry {
                provider = config.telemetryProvider
                scope = telemetryScope
                metrics = opMetrics
                attributes = attributesOf {
                    "rpc.system" to "aws-api"
                }
            }
            execution.auth = OperationAuthConfig(authSchemeAdapter, configuredAuthSchemes, identityProviderConfig)
            execution.endpointResolver = EndpointResolverAdapter(config)
            execution.retryStrategy = config.retryStrategy
            execution.retryPolicy = config.retryPolicy
        }
        mergeServiceDefaults(op.context)
        op.install(AwsRetryHeaderMiddleware())
        op.interceptors.add(AwsSpanInterceptor)
        op.interceptors.add(BusinessMetricsInterceptor())
        op.install(UserAgent(awsUserAgentMetadata))
        op.install(RecursionDetection())
        op.interceptors.addAll(config.interceptors)
        return op.execute(client, input, block)
    }

    override fun close() {
        managedResources.unshareAll()
    }

    /**
     * merge the defaults configured for the service into the execution context before firing off a request
     */
    private fun mergeServiceDefaults(ctx: ExecutionContext) {
        ctx.putIfAbsent(SdkClientOption.ClientName, config.clientName)
        ctx.putIfAbsent(SdkClientOption.LogMode, config.logMode)
        ctx.putIfAbsentNotNull(AwsAttributes.Region, config.region)
        ctx.putIfAbsentNotNull(AwsSigningAttributes.SigningRegion, config.region)
        ctx.putIfAbsent(AwsSigningAttributes.SigningService, "bedrock")
        ctx.putIfAbsent(AwsSigningAttributes.CredentialsProvider, config.credentialsProvider)
    }

}



