commonMain.aws.sdk.kotlin.services.cleanroomsml.model.GetTrainedModelInferenceJobResponse.kt

// Code generated by smithy-kotlin-codegen. DO NOT EDIT!

package aws.sdk.kotlin.services.cleanroomsml.model

import aws.smithy.kotlin.runtime.SdkDsl
import aws.smithy.kotlin.runtime.time.Instant

public class GetTrainedModelInferenceJobResponse private constructor(builder: Builder) {
    /**
     * The Amazon Resource Name (ARN) of the configured model algorithm association that was used for the trained model inference job.
     */
    public val configuredModelAlgorithmAssociationArn: kotlin.String? = builder.configuredModelAlgorithmAssociationArn
    /**
     * The execution parameters for the model inference job container.
     */
    public val containerExecutionParameters: aws.sdk.kotlin.services.cleanroomsml.model.InferenceContainerExecutionParameters? = builder.containerExecutionParameters
    /**
     * The time at which the trained model inference job was created.
     */
    public val createTime: aws.smithy.kotlin.runtime.time.Instant = requireNotNull(builder.createTime) { "A non-null value must be provided for createTime" }
    /**
     * The data source that was used for the trained model inference job.
     */
    public val dataSource: aws.sdk.kotlin.services.cleanroomsml.model.ModelInferenceDataSource? = builder.dataSource
    /**
     * The description of the trained model inference job.
     */
    public val description: kotlin.String? = builder.description
    /**
     * The environment variables to set in the Docker container.
     */
    public val environment: Map<String, String>? = builder.environment
    /**
     * Information about the inference container image.
     */
    public val inferenceContainerImageDigest: kotlin.String? = builder.inferenceContainerImageDigest
    /**
     * The Amazon Resource Name (ARN) of the KMS key. This key is used to encrypt and decrypt customer-owned data in the ML inference job and associated data.
     */
    public val kmsKeyArn: kotlin.String? = builder.kmsKeyArn
    /**
     * The logs status for the trained model inference job.
     */
    public val logsStatus: aws.sdk.kotlin.services.cleanroomsml.model.LogsStatus? = builder.logsStatus
    /**
     * Details about the logs status for the trained model inference job.
     */
    public val logsStatusDetails: kotlin.String? = builder.logsStatusDetails
    /**
     * The membership ID of the membership that contains the trained model inference job.
     */
    public val membershipIdentifier: kotlin.String = requireNotNull(builder.membershipIdentifier) { "A non-null value must be provided for membershipIdentifier" }
    /**
     * The metrics status for the trained model inference job.
     */
    public val metricsStatus: aws.sdk.kotlin.services.cleanroomsml.model.MetricsStatus? = builder.metricsStatus
    /**
     * Details about the metrics status for the trained model inference job.
     */
    public val metricsStatusDetails: kotlin.String? = builder.metricsStatusDetails
    /**
     * The name of the trained model inference job.
     */
    public val name: kotlin.String = requireNotNull(builder.name) { "A non-null value must be provided for name" }
    /**
     * The output configuration information for the trained model inference job.
     */
    public val outputConfiguration: aws.sdk.kotlin.services.cleanroomsml.model.InferenceOutputConfiguration? = builder.outputConfiguration
    /**
     * The resource configuration information for the trained model inference job.
     */
    public val resourceConfig: aws.sdk.kotlin.services.cleanroomsml.model.InferenceResourceConfig? = builder.resourceConfig
    /**
     * The status of the trained model inference job.
     */
    public val status: aws.sdk.kotlin.services.cleanroomsml.model.TrainedModelInferenceJobStatus = requireNotNull(builder.status) { "A non-null value must be provided for status" }
    /**
     * Details about the status of a resource.
     */
    public val statusDetails: aws.sdk.kotlin.services.cleanroomsml.model.StatusDetails? = builder.statusDetails
    /**
     * The optional metadata that you applied to the resource to help you categorize and organize it. Each tag consists of a key and an optional value, both of which you define.
     *
     * The following basic restrictions apply to tags:
     * + Maximum number of tags per resource - 50.
     * + For each resource, each tag key must be unique, and each tag key can have only one value.
     * + Maximum key length - 128 Unicode characters in UTF-8.
     * + Maximum value length - 256 Unicode characters in UTF-8.
     * + If your tagging schema is used across multiple services and resources, remember that other services may have restrictions on allowed characters. Generally allowed characters are: letters, numbers, and spaces representable in UTF-8, and the following characters: + - = . _ : / @.
     * + Tag keys and values are case sensitive.
     * + Do not use aws:, AWS:, or any upper or lowercase combination of these as a prefix for keys, because such prefixes are reserved for AWS use. You cannot edit or delete tag keys with this prefix. Values can have this prefix. If a tag value has aws as its prefix but the key does not, then Clean Rooms ML considers it to be a user tag and it will count against the limit of 50 tags. Tags with only the key prefix of aws do not count against your tags-per-resource limit.
     */
    public val tags: Map<String, String>? = builder.tags
    /**
     * The Amazon Resource Name (ARN) for the trained model that was used for the trained model inference job.
     */
    public val trainedModelArn: kotlin.String = requireNotNull(builder.trainedModelArn) { "A non-null value must be provided for trainedModelArn" }
    /**
     * The Amazon Resource Name (ARN) of the trained model inference job.
     */
    public val trainedModelInferenceJobArn: kotlin.String = requireNotNull(builder.trainedModelInferenceJobArn) { "A non-null value must be provided for trainedModelInferenceJobArn" }
    /**
     * The most recent time at which the trained model inference job was updated.
     */
    public val updateTime: aws.smithy.kotlin.runtime.time.Instant = requireNotNull(builder.updateTime) { "A non-null value must be provided for updateTime" }

    public companion object {
        public operator fun invoke(block: Builder.() -> kotlin.Unit): aws.sdk.kotlin.services.cleanroomsml.model.GetTrainedModelInferenceJobResponse = Builder().apply(block).build()
    }

    override fun toString(): kotlin.String = buildString {
        append("GetTrainedModelInferenceJobResponse(")
        append("configuredModelAlgorithmAssociationArn=$configuredModelAlgorithmAssociationArn,")
        append("containerExecutionParameters=$containerExecutionParameters,")
        append("createTime=$createTime,")
        append("dataSource=$dataSource,")
        append("description=$description,")
        append("environment=$environment,")
        append("inferenceContainerImageDigest=$inferenceContainerImageDigest,")
        append("kmsKeyArn=$kmsKeyArn,")
        append("logsStatus=$logsStatus,")
        append("logsStatusDetails=$logsStatusDetails,")
        append("membershipIdentifier=$membershipIdentifier,")
        append("metricsStatus=$metricsStatus,")
        append("metricsStatusDetails=$metricsStatusDetails,")
        append("name=$name,")
        append("outputConfiguration=$outputConfiguration,")
        append("resourceConfig=$resourceConfig,")
        append("status=$status,")
        append("statusDetails=$statusDetails,")
        append("tags=$tags,")
        append("trainedModelArn=$trainedModelArn,")
        append("trainedModelInferenceJobArn=$trainedModelInferenceJobArn,")
        append("updateTime=$updateTime")
        append(")")
    }

    override fun hashCode(): kotlin.Int {
        var result = configuredModelAlgorithmAssociationArn?.hashCode() ?: 0
        result = 31 * result + (containerExecutionParameters?.hashCode() ?: 0)
        result = 31 * result + (createTime.hashCode())
        result = 31 * result + (dataSource?.hashCode() ?: 0)
        result = 31 * result + (description?.hashCode() ?: 0)
        result = 31 * result + (environment?.hashCode() ?: 0)
        result = 31 * result + (inferenceContainerImageDigest?.hashCode() ?: 0)
        result = 31 * result + (kmsKeyArn?.hashCode() ?: 0)
        result = 31 * result + (logsStatus?.hashCode() ?: 0)
        result = 31 * result + (logsStatusDetails?.hashCode() ?: 0)
        result = 31 * result + (membershipIdentifier.hashCode())
        result = 31 * result + (metricsStatus?.hashCode() ?: 0)
        result = 31 * result + (metricsStatusDetails?.hashCode() ?: 0)
        result = 31 * result + (name.hashCode())
        result = 31 * result + (outputConfiguration?.hashCode() ?: 0)
        result = 31 * result + (resourceConfig?.hashCode() ?: 0)
        result = 31 * result + (status.hashCode())
        result = 31 * result + (statusDetails?.hashCode() ?: 0)
        result = 31 * result + (tags?.hashCode() ?: 0)
        result = 31 * result + (trainedModelArn.hashCode())
        result = 31 * result + (trainedModelInferenceJobArn.hashCode())
        result = 31 * result + (updateTime.hashCode())
        return result
    }

    override fun equals(other: kotlin.Any?): kotlin.Boolean {
        if (this === other) return true
        if (other == null || this::class != other::class) return false

        other as GetTrainedModelInferenceJobResponse

        if (configuredModelAlgorithmAssociationArn != other.configuredModelAlgorithmAssociationArn) return false
        if (containerExecutionParameters != other.containerExecutionParameters) return false
        if (createTime != other.createTime) return false
        if (dataSource != other.dataSource) return false
        if (description != other.description) return false
        if (environment != other.environment) return false
        if (inferenceContainerImageDigest != other.inferenceContainerImageDigest) return false
        if (kmsKeyArn != other.kmsKeyArn) return false
        if (logsStatus != other.logsStatus) return false
        if (logsStatusDetails != other.logsStatusDetails) return false
        if (membershipIdentifier != other.membershipIdentifier) return false
        if (metricsStatus != other.metricsStatus) return false
        if (metricsStatusDetails != other.metricsStatusDetails) return false
        if (name != other.name) return false
        if (outputConfiguration != other.outputConfiguration) return false
        if (resourceConfig != other.resourceConfig) return false
        if (status != other.status) return false
        if (statusDetails != other.statusDetails) return false
        if (tags != other.tags) return false
        if (trainedModelArn != other.trainedModelArn) return false
        if (trainedModelInferenceJobArn != other.trainedModelInferenceJobArn) return false
        if (updateTime != other.updateTime) return false

        return true
    }

    public inline fun copy(block: Builder.() -> kotlin.Unit = {}): aws.sdk.kotlin.services.cleanroomsml.model.GetTrainedModelInferenceJobResponse = Builder(this).apply(block).build()

    @SdkDsl
    public class Builder {
        /**
         * The Amazon Resource Name (ARN) of the configured model algorithm association that was used for the trained model inference job.
         */
        public var configuredModelAlgorithmAssociationArn: kotlin.String? = null
        /**
         * The execution parameters for the model inference job container.
         */
        public var containerExecutionParameters: aws.sdk.kotlin.services.cleanroomsml.model.InferenceContainerExecutionParameters? = null
        /**
         * The time at which the trained model inference job was created.
         */
        public var createTime: aws.smithy.kotlin.runtime.time.Instant? = null
        /**
         * The data source that was used for the trained model inference job.
         */
        public var dataSource: aws.sdk.kotlin.services.cleanroomsml.model.ModelInferenceDataSource? = null
        /**
         * The description of the trained model inference job.
         */
        public var description: kotlin.String? = null
        /**
         * The environment variables to set in the Docker container.
         */
        public var environment: Map<String, String>? = null
        /**
         * Information about the inference container image.
         */
        public var inferenceContainerImageDigest: kotlin.String? = null
        /**
         * The Amazon Resource Name (ARN) of the KMS key. This key is used to encrypt and decrypt customer-owned data in the ML inference job and associated data.
         */
        public var kmsKeyArn: kotlin.String? = null
        /**
         * The logs status for the trained model inference job.
         */
        public var logsStatus: aws.sdk.kotlin.services.cleanroomsml.model.LogsStatus? = null
        /**
         * Details about the logs status for the trained model inference job.
         */
        public var logsStatusDetails: kotlin.String? = null
        /**
         * The membership ID of the membership that contains the trained model inference job.
         */
        public var membershipIdentifier: kotlin.String? = null
        /**
         * The metrics status for the trained model inference job.
         */
        public var metricsStatus: aws.sdk.kotlin.services.cleanroomsml.model.MetricsStatus? = null
        /**
         * Details about the metrics status for the trained model inference job.
         */
        public var metricsStatusDetails: kotlin.String? = null
        /**
         * The name of the trained model inference job.
         */
        public var name: kotlin.String? = null
        /**
         * The output configuration information for the trained model inference job.
         */
        public var outputConfiguration: aws.sdk.kotlin.services.cleanroomsml.model.InferenceOutputConfiguration? = null
        /**
         * The resource configuration information for the trained model inference job.
         */
        public var resourceConfig: aws.sdk.kotlin.services.cleanroomsml.model.InferenceResourceConfig? = null
        /**
         * The status of the trained model inference job.
         */
        public var status: aws.sdk.kotlin.services.cleanroomsml.model.TrainedModelInferenceJobStatus? = null
        /**
         * Details about the status of a resource.
         */
        public var statusDetails: aws.sdk.kotlin.services.cleanroomsml.model.StatusDetails? = null
        /**
         * The optional metadata that you applied to the resource to help you categorize and organize it. Each tag consists of a key and an optional value, both of which you define.
         *
         * The following basic restrictions apply to tags:
         * + Maximum number of tags per resource - 50.
         * + For each resource, each tag key must be unique, and each tag key can have only one value.
         * + Maximum key length - 128 Unicode characters in UTF-8.
         * + Maximum value length - 256 Unicode characters in UTF-8.
         * + If your tagging schema is used across multiple services and resources, remember that other services may have restrictions on allowed characters. Generally allowed characters are: letters, numbers, and spaces representable in UTF-8, and the following characters: + - = . _ : / @.
         * + Tag keys and values are case sensitive.
         * + Do not use aws:, AWS:, or any upper or lowercase combination of these as a prefix for keys, because such prefixes are reserved for AWS use. You cannot edit or delete tag keys with this prefix. Values can have this prefix. If a tag value has aws as its prefix but the key does not, then Clean Rooms ML considers it to be a user tag and it will count against the limit of 50 tags. Tags with only the key prefix of aws do not count against your tags-per-resource limit.
         */
        public var tags: Map<String, String>? = null
        /**
         * The Amazon Resource Name (ARN) for the trained model that was used for the trained model inference job.
         */
        public var trainedModelArn: kotlin.String? = null
        /**
         * The Amazon Resource Name (ARN) of the trained model inference job.
         */
        public var trainedModelInferenceJobArn: kotlin.String? = null
        /**
         * The most recent time at which the trained model inference job was updated.
         */
        public var updateTime: aws.smithy.kotlin.runtime.time.Instant? = null

        @PublishedApi
        internal constructor()
        @PublishedApi
        internal constructor(x: aws.sdk.kotlin.services.cleanroomsml.model.GetTrainedModelInferenceJobResponse) : this() {
            this.configuredModelAlgorithmAssociationArn = x.configuredModelAlgorithmAssociationArn
            this.containerExecutionParameters = x.containerExecutionParameters
            this.createTime = x.createTime
            this.dataSource = x.dataSource
            this.description = x.description
            this.environment = x.environment
            this.inferenceContainerImageDigest = x.inferenceContainerImageDigest
            this.kmsKeyArn = x.kmsKeyArn
            this.logsStatus = x.logsStatus
            this.logsStatusDetails = x.logsStatusDetails
            this.membershipIdentifier = x.membershipIdentifier
            this.metricsStatus = x.metricsStatus
            this.metricsStatusDetails = x.metricsStatusDetails
            this.name = x.name
            this.outputConfiguration = x.outputConfiguration
            this.resourceConfig = x.resourceConfig
            this.status = x.status
            this.statusDetails = x.statusDetails
            this.tags = x.tags
            this.trainedModelArn = x.trainedModelArn
            this.trainedModelInferenceJobArn = x.trainedModelInferenceJobArn
            this.updateTime = x.updateTime
        }

        @PublishedApi
        internal fun build(): aws.sdk.kotlin.services.cleanroomsml.model.GetTrainedModelInferenceJobResponse = GetTrainedModelInferenceJobResponse(this)

        /**
         * construct an [aws.sdk.kotlin.services.cleanroomsml.model.InferenceContainerExecutionParameters] inside the given [block]
         */
        public fun containerExecutionParameters(block: aws.sdk.kotlin.services.cleanroomsml.model.InferenceContainerExecutionParameters.Builder.() -> kotlin.Unit) {
            this.containerExecutionParameters = aws.sdk.kotlin.services.cleanroomsml.model.InferenceContainerExecutionParameters.invoke(block)
        }

        /**
         * construct an [aws.sdk.kotlin.services.cleanroomsml.model.ModelInferenceDataSource] inside the given [block]
         */
        public fun dataSource(block: aws.sdk.kotlin.services.cleanroomsml.model.ModelInferenceDataSource.Builder.() -> kotlin.Unit) {
            this.dataSource = aws.sdk.kotlin.services.cleanroomsml.model.ModelInferenceDataSource.invoke(block)
        }

        /**
         * construct an [aws.sdk.kotlin.services.cleanroomsml.model.InferenceOutputConfiguration] inside the given [block]
         */
        public fun outputConfiguration(block: aws.sdk.kotlin.services.cleanroomsml.model.InferenceOutputConfiguration.Builder.() -> kotlin.Unit) {
            this.outputConfiguration = aws.sdk.kotlin.services.cleanroomsml.model.InferenceOutputConfiguration.invoke(block)
        }

        /**
         * construct an [aws.sdk.kotlin.services.cleanroomsml.model.InferenceResourceConfig] inside the given [block]
         */
        public fun resourceConfig(block: aws.sdk.kotlin.services.cleanroomsml.model.InferenceResourceConfig.Builder.() -> kotlin.Unit) {
            this.resourceConfig = aws.sdk.kotlin.services.cleanroomsml.model.InferenceResourceConfig.invoke(block)
        }

        /**
         * construct an [aws.sdk.kotlin.services.cleanroomsml.model.StatusDetails] inside the given [block]
         */
        public fun statusDetails(block: aws.sdk.kotlin.services.cleanroomsml.model.StatusDetails.Builder.() -> kotlin.Unit) {
            this.statusDetails = aws.sdk.kotlin.services.cleanroomsml.model.StatusDetails.invoke(block)
        }

        internal fun correctErrors(): Builder {
            if (createTime == null) createTime = Instant.fromEpochSeconds(0)
            if (membershipIdentifier == null) membershipIdentifier = ""
            if (name == null) name = ""
            if (status == null) status = TrainedModelInferenceJobStatus.SdkUnknown("no value provided")
            if (trainedModelArn == null) trainedModelArn = ""
            if (trainedModelInferenceJobArn == null) trainedModelInferenceJobArn = ""
            if (updateTime == null) updateTime = Instant.fromEpochSeconds(0)
            return this
        }
    }
}
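
// Usage sketch (illustrative only, not part of the generated code; all values below are hypothetical).
// The companion `invoke` runs the DSL builder, and `build()` enforces the required members
// (createTime, membershipIdentifier, name, status, trainedModelArn, trainedModelInferenceJobArn,
// updateTime). `copy { }` re-runs the builder seeded with the current values.
private fun exampleUsage() {
    val response = GetTrainedModelInferenceJobResponse {
        name = "example-inference-job"                                                // hypothetical value
        membershipIdentifier = "example-membership-id"                                // hypothetical value
        trainedModelArn = "arn:aws:cleanrooms-ml:::trained-model/example"             // hypothetical ARN
        trainedModelInferenceJobArn = "arn:aws:cleanrooms-ml:::inference-job/example" // hypothetical ARN
        // SdkUnknown is used here only because it is the sole variant visible in this file;
        // real code would use one of the defined TrainedModelInferenceJobStatus values.
        status = TrainedModelInferenceJobStatus.SdkUnknown("example")
        createTime = Instant.fromEpochSeconds(0)
        updateTime = Instant.fromEpochSeconds(0)
        environment = mapOf("LOG_LEVEL" to "INFO")                                    // hypothetical entry
        // Nested structures use the builder helpers defined above, e.g.:
        // resourceConfig { /* set InferenceResourceConfig.Builder members (see that model file) */ }
    }

    // Derive a modified value without mutating the original.
    val relabelled = response.copy { description = "re-labelled job" }
    println(relabelled)
}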



