com.pulumi.aws.dms.inputs.EndpointKafkaSettingsArgs (Maven / Gradle / Ivy)

A Pulumi package for creating and managing Amazon Web Services (AWS) cloud resources.

// *** WARNING: this file was generated by pulumi-java-gen. ***
// *** Do not edit by hand unless you're certain you know what you are doing! ***

package com.pulumi.aws.dms.inputs;

import com.pulumi.core.Output;
import com.pulumi.core.annotations.Import;
import com.pulumi.exceptions.MissingRequiredPropertyException;
import java.lang.Boolean;
import java.lang.Integer;
import java.lang.String;
import java.util.Objects;
import java.util.Optional;
import javax.annotation.Nullable;


public final class EndpointKafkaSettingsArgs extends com.pulumi.resources.ResourceArgs {

    public static final EndpointKafkaSettingsArgs Empty = new EndpointKafkaSettingsArgs();

    /**
     * Kafka broker location. Specify in the form broker-hostname-or-ip:port.
     * 
     */
    @Import(name="broker", required=true)
    private Output<String> broker;

    /**
     * @return Kafka broker location. Specify in the form broker-hostname-or-ip:port.
     * 
     */
    public Output<String> broker() {
        return this.broker;
    }

    /**
     * Shows detailed control information for table definition, column definition, and table and column changes in the Kafka message output. Default is `false`.
     * 
     */
    @Import(name="includeControlDetails")
    private @Nullable Output<Boolean> includeControlDetails;

    /**
     * @return Shows detailed control information for table definition, column definition, and table and column changes in the Kafka message output. Default is `false`.
     * 
     */
    public Optional<Output<Boolean>> includeControlDetails() {
        return Optional.ofNullable(this.includeControlDetails);
    }

    /**
     * Include NULL and empty columns for records migrated to the endpoint. Default is `false`.
     * 
     */
    @Import(name="includeNullAndEmpty")
    private @Nullable Output<Boolean> includeNullAndEmpty;

    /**
     * @return Include NULL and empty columns for records migrated to the endpoint. Default is `false`.
     * 
     */
    public Optional<Output<Boolean>> includeNullAndEmpty() {
        return Optional.ofNullable(this.includeNullAndEmpty);
    }

    /**
     * Shows the partition value within the Kafka message output unless the partition type is `schema-table-type`. Default is `false`.
     * 
     */
    @Import(name="includePartitionValue")
    private @Nullable Output<Boolean> includePartitionValue;

    /**
     * @return Shows the partition value within the Kafka message output unless the partition type is `schema-table-type`. Default is `false`.
     * 
     */
    public Optional<Output<Boolean>> includePartitionValue() {
        return Optional.ofNullable(this.includePartitionValue);
    }

    /**
     * Includes any data definition language (DDL) operations that change the table in the control data, such as `rename-table`, `drop-table`, `add-column`, `drop-column`, and `rename-column`. Default is `false`.
     * 
     */
    @Import(name="includeTableAlterOperations")
    private @Nullable Output<Boolean> includeTableAlterOperations;

    /**
     * @return Includes any data definition language (DDL) operations that change the table in the control data, such as `rename-table`, `drop-table`, `add-column`, `drop-column`, and `rename-column`. Default is `false`.
     * 
     */
    public Optional<Output<Boolean>> includeTableAlterOperations() {
        return Optional.ofNullable(this.includeTableAlterOperations);
    }

    /**
     * Provides detailed transaction information from the source database. This information includes a commit timestamp, a log position, and values for `transaction_id`, previous `transaction_id`, and `transaction_record_id` (the record offset within a transaction). Default is `false`.
     * 
     */
    @Import(name="includeTransactionDetails")
    private @Nullable Output<Boolean> includeTransactionDetails;

    /**
     * @return Provides detailed transaction information from the source database. This information includes a commit timestamp, a log position, and values for `transaction_id`, previous `transaction_id`, and `transaction_record_id` (the record offset within a transaction). Default is `false`.
     * 
     */
    public Optional<Output<Boolean>> includeTransactionDetails() {
        return Optional.ofNullable(this.includeTransactionDetails);
    }

    /**
     * Output format for the records created on the endpoint. Message format is `JSON` (default) or `JSON_UNFORMATTED` (a single line with no tab).
     * 
     */
    @Import(name="messageFormat")
    private @Nullable Output<String> messageFormat;

    /**
     * @return Output format for the records created on the endpoint. Message format is `JSON` (default) or `JSON_UNFORMATTED` (a single line with no tab).
     * 
     */
    public Optional<Output<String>> messageFormat() {
        return Optional.ofNullable(this.messageFormat);
    }

    /**
     * Maximum size in bytes for records created on the endpoint. Default is `1,000,000`.
     * 
     */
    @Import(name="messageMaxBytes")
    private @Nullable Output<Integer> messageMaxBytes;

    /**
     * @return Maximum size in bytes for records created on the endpoint. Default is `1,000,000`.
     * 
     */
    public Optional<Output<Integer>> messageMaxBytes() {
        return Optional.ofNullable(this.messageMaxBytes);
    }

    /**
     * Set this optional parameter to true to avoid adding a '0x' prefix to raw data in hexadecimal format. For example, by default, AWS DMS adds a '0x' prefix to the LOB column type in hexadecimal format moving from an Oracle source to a Kafka target. Use the `no_hex_prefix` endpoint setting to enable migration of RAW data type columns without adding the `'0x'` prefix.
     * 
     */
    @Import(name="noHexPrefix")
    private @Nullable Output<Boolean> noHexPrefix;

    /**
     * @return Set this optional parameter to true to avoid adding a '0x' prefix to raw data in hexadecimal format. For example, by default, AWS DMS adds a '0x' prefix to the LOB column type in hexadecimal format moving from an Oracle source to a Kafka target. Use the `no_hex_prefix` endpoint setting to enable migration of RAW data type columns without adding the `'0x'` prefix.
     * 
     */
    public Optional<Output<Boolean>> noHexPrefix() {
        return Optional.ofNullable(this.noHexPrefix);
    }

    /**
     * Prefixes schema and table names to partition values when the partition type is `primary-key-type`. Doing this increases data distribution among Kafka partitions. For example, suppose that a SysBench schema has thousands of tables and each table has only a limited range for a primary key. In this case, the same primary key is sent from thousands of tables to the same partition, which causes throttling. Default is `false`.
     * 
     */
    @Import(name="partitionIncludeSchemaTable")
    private @Nullable Output<Boolean> partitionIncludeSchemaTable;

    /**
     * @return Prefixes schema and table names to partition values when the partition type is `primary-key-type`. Doing this increases data distribution among Kafka partitions. For example, suppose that a SysBench schema has thousands of tables and each table has only a limited range for a primary key. In this case, the same primary key is sent from thousands of tables to the same partition, which causes throttling. Default is `false`.
     * 
     */
    public Optional<Output<Boolean>> partitionIncludeSchemaTable() {
        return Optional.ofNullable(this.partitionIncludeSchemaTable);
    }

    /**
     * Secure password you created when you first set up your MSK cluster to validate a client identity and make an encrypted connection between server and client using SASL-SSL authentication.
     * 
     */
    @Import(name="saslPassword")
    private @Nullable Output<String> saslPassword;

    /**
     * @return Secure password you created when you first set up your MSK cluster to validate a client identity and make an encrypted connection between server and client using SASL-SSL authentication.
     * 
     */
    public Optional<Output<String>> saslPassword() {
        return Optional.ofNullable(this.saslPassword);
    }

    /**
     * Secure user name you created when you first set up your MSK cluster to validate a client identity and make an encrypted connection between server and client using SASL-SSL authentication.
     * 
     */
    @Import(name="saslUsername")
    private @Nullable Output<String> saslUsername;

    /**
     * @return Secure user name you created when you first set up your MSK cluster to validate a client identity and make an encrypted connection between server and client using SASL-SSL authentication.
     * 
     */
    public Optional<Output<String>> saslUsername() {
        return Optional.ofNullable(this.saslUsername);
    }

    /**
     * Set secure connection to a Kafka target endpoint using Transport Layer Security (TLS). Options include `ssl-encryption`, `ssl-authentication`, and `sasl-ssl`. `sasl-ssl` requires `sasl_username` and `sasl_password`.
     * 
     */
    @Import(name="securityProtocol")
    private @Nullable Output<String> securityProtocol;

    /**
     * @return Set secure connection to a Kafka target endpoint using Transport Layer Security (TLS). Options include `ssl-encryption`, `ssl-authentication`, and `sasl-ssl`. `sasl-ssl` requires `sasl_username` and `sasl_password`.
     * 
     */
    public Optional<Output<String>> securityProtocol() {
        return Optional.ofNullable(this.securityProtocol);
    }

    /**
     * ARN for the private certificate authority (CA) cert that AWS DMS uses to securely connect to your Kafka target endpoint.
     * 
     */
    @Import(name="sslCaCertificateArn")
    private @Nullable Output<String> sslCaCertificateArn;

    /**
     * @return ARN for the private certificate authority (CA) cert that AWS DMS uses to securely connect to your Kafka target endpoint.
     * 
     */
    public Optional<Output<String>> sslCaCertificateArn() {
        return Optional.ofNullable(this.sslCaCertificateArn);
    }

    /**
     * ARN of the client certificate used to securely connect to a Kafka target endpoint.
     * 
     */
    @Import(name="sslClientCertificateArn")
    private @Nullable Output<String> sslClientCertificateArn;

    /**
     * @return ARN of the client certificate used to securely connect to a Kafka target endpoint.
     * 
     */
    public Optional<Output<String>> sslClientCertificateArn() {
        return Optional.ofNullable(this.sslClientCertificateArn);
    }

    /**
     * ARN for the client private key used to securely connect to a Kafka target endpoint.
     * 
     */
    @Import(name="sslClientKeyArn")
    private @Nullable Output<String> sslClientKeyArn;

    /**
     * @return ARN for the client private key used to securely connect to a Kafka target endpoint.
     * 
     */
    public Optional<Output<String>> sslClientKeyArn() {
        return Optional.ofNullable(this.sslClientKeyArn);
    }

    /**
     * Password for the client private key used to securely connect to a Kafka target endpoint.
     * 
     */
    @Import(name="sslClientKeyPassword")
    private @Nullable Output<String> sslClientKeyPassword;

    /**
     * @return Password for the client private key used to securely connect to a Kafka target endpoint.
     * 
     */
    public Optional<Output<String>> sslClientKeyPassword() {
        return Optional.ofNullable(this.sslClientKeyPassword);
    }

    /**
     * Kafka topic for migration. Default is `kafka-default-topic`.
     * 
     */
    @Import(name="topic")
    private @Nullable Output<String> topic;

    /**
     * @return Kafka topic for migration. Default is `kafka-default-topic`.
     * 
     */
    public Optional<Output<String>> topic() {
        return Optional.ofNullable(this.topic);
    }

    private EndpointKafkaSettingsArgs() {}

    private EndpointKafkaSettingsArgs(EndpointKafkaSettingsArgs $) {
        this.broker = $.broker;
        this.includeControlDetails = $.includeControlDetails;
        this.includeNullAndEmpty = $.includeNullAndEmpty;
        this.includePartitionValue = $.includePartitionValue;
        this.includeTableAlterOperations = $.includeTableAlterOperations;
        this.includeTransactionDetails = $.includeTransactionDetails;
        this.messageFormat = $.messageFormat;
        this.messageMaxBytes = $.messageMaxBytes;
        this.noHexPrefix = $.noHexPrefix;
        this.partitionIncludeSchemaTable = $.partitionIncludeSchemaTable;
        this.saslPassword = $.saslPassword;
        this.saslUsername = $.saslUsername;
        this.securityProtocol = $.securityProtocol;
        this.sslCaCertificateArn = $.sslCaCertificateArn;
        this.sslClientCertificateArn = $.sslClientCertificateArn;
        this.sslClientKeyArn = $.sslClientKeyArn;
        this.sslClientKeyPassword = $.sslClientKeyPassword;
        this.topic = $.topic;
    }

    public static Builder builder() {
        return new Builder();
    }
    public static Builder builder(EndpointKafkaSettingsArgs defaults) {
        return new Builder(defaults);
    }

    public static final class Builder {
        private EndpointKafkaSettingsArgs $;

        public Builder() {
            $ = new EndpointKafkaSettingsArgs();
        }

        public Builder(EndpointKafkaSettingsArgs defaults) {
            $ = new EndpointKafkaSettingsArgs(Objects.requireNonNull(defaults));
        }

        /**
         * @param broker Kafka broker location. Specify in the form broker-hostname-or-ip:port.
         * 
         * @return builder
         * 
         */
        public Builder broker(Output<String> broker) {
            $.broker = broker;
            return this;
        }

        /**
         * @param broker Kafka broker location. Specify in the form broker-hostname-or-ip:port.
         * 
         * @return builder
         * 
         */
        public Builder broker(String broker) {
            return broker(Output.of(broker));
        }

        /**
         * @param includeControlDetails Shows detailed control information for table definition, column definition, and table and column changes in the Kafka message output. Default is `false`.
         * 
         * @return builder
         * 
         */
        public Builder includeControlDetails(@Nullable Output<Boolean> includeControlDetails) {
            $.includeControlDetails = includeControlDetails;
            return this;
        }

        /**
         * @param includeControlDetails Shows detailed control information for table definition, column definition, and table and column changes in the Kafka message output. Default is `false`.
         * 
         * @return builder
         * 
         */
        public Builder includeControlDetails(Boolean includeControlDetails) {
            return includeControlDetails(Output.of(includeControlDetails));
        }

        /**
         * @param includeNullAndEmpty Include NULL and empty columns for records migrated to the endpoint. Default is `false`.
         * 
         * @return builder
         * 
         */
        public Builder includeNullAndEmpty(@Nullable Output<Boolean> includeNullAndEmpty) {
            $.includeNullAndEmpty = includeNullAndEmpty;
            return this;
        }

        /**
         * @param includeNullAndEmpty Include NULL and empty columns for records migrated to the endpoint. Default is `false`.
         * 
         * @return builder
         * 
         */
        public Builder includeNullAndEmpty(Boolean includeNullAndEmpty) {
            return includeNullAndEmpty(Output.of(includeNullAndEmpty));
        }

        /**
         * @param includePartitionValue Shows the partition value within the Kafka message output unless the partition type is `schema-table-type`. Default is `false`.
         * 
         * @return builder
         * 
         */
        public Builder includePartitionValue(@Nullable Output<Boolean> includePartitionValue) {
            $.includePartitionValue = includePartitionValue;
            return this;
        }

        /**
         * @param includePartitionValue Shows the partition value within the Kafka message output unless the partition type is `schema-table-type`. Default is `false`.
         * 
         * @return builder
         * 
         */
        public Builder includePartitionValue(Boolean includePartitionValue) {
            return includePartitionValue(Output.of(includePartitionValue));
        }

        /**
         * @param includeTableAlterOperations Includes any data definition language (DDL) operations that change the table in the control data, such as `rename-table`, `drop-table`, `add-column`, `drop-column`, and `rename-column`. Default is `false`.
         * 
         * @return builder
         * 
         */
        public Builder includeTableAlterOperations(@Nullable Output<Boolean> includeTableAlterOperations) {
            $.includeTableAlterOperations = includeTableAlterOperations;
            return this;
        }

        /**
         * @param includeTableAlterOperations Includes any data definition language (DDL) operations that change the table in the control data, such as `rename-table`, `drop-table`, `add-column`, `drop-column`, and `rename-column`. Default is `false`.
         * 
         * @return builder
         * 
         */
        public Builder includeTableAlterOperations(Boolean includeTableAlterOperations) {
            return includeTableAlterOperations(Output.of(includeTableAlterOperations));
        }

        /**
         * @param includeTransactionDetails Provides detailed transaction information from the source database. This information includes a commit timestamp, a log position, and values for `transaction_id`, previous `transaction_id`, and `transaction_record_id` (the record offset within a transaction). Default is `false`.
         * 
         * @return builder
         * 
         */
        public Builder includeTransactionDetails(@Nullable Output<Boolean> includeTransactionDetails) {
            $.includeTransactionDetails = includeTransactionDetails;
            return this;
        }

        /**
         * @param includeTransactionDetails Provides detailed transaction information from the source database. This information includes a commit timestamp, a log position, and values for `transaction_id`, previous `transaction_id`, and `transaction_record_id` (the record offset within a transaction). Default is `false`.
         * 
         * @return builder
         * 
         */
        public Builder includeTransactionDetails(Boolean includeTransactionDetails) {
            return includeTransactionDetails(Output.of(includeTransactionDetails));
        }

        /**
         * @param messageFormat Output format for the records created on the endpoint. Message format is `JSON` (default) or `JSON_UNFORMATTED` (a single line with no tab).
         * 
         * @return builder
         * 
         */
        public Builder messageFormat(@Nullable Output<String> messageFormat) {
            $.messageFormat = messageFormat;
            return this;
        }

        /**
         * @param messageFormat Output format for the records created on the endpoint. Message format is `JSON` (default) or `JSON_UNFORMATTED` (a single line with no tab).
         * 
         * @return builder
         * 
         */
        public Builder messageFormat(String messageFormat) {
            return messageFormat(Output.of(messageFormat));
        }

        /**
         * @param messageMaxBytes Maximum size in bytes for records created on the endpoint. Default is `1,000,000`.
         * 
         * @return builder
         * 
         */
        public Builder messageMaxBytes(@Nullable Output<Integer> messageMaxBytes) {
            $.messageMaxBytes = messageMaxBytes;
            return this;
        }

        /**
         * @param messageMaxBytes Maximum size in bytes for records created on the endpoint. Default is `1,000,000`.
         * 
         * @return builder
         * 
         */
        public Builder messageMaxBytes(Integer messageMaxBytes) {
            return messageMaxBytes(Output.of(messageMaxBytes));
        }

        /**
         * @param noHexPrefix Set this optional parameter to true to avoid adding a '0x' prefix to raw data in hexadecimal format. For example, by default, AWS DMS adds a '0x' prefix to the LOB column type in hexadecimal format moving from an Oracle source to a Kafka target. Use the `no_hex_prefix` endpoint setting to enable migration of RAW data type columns without adding the `'0x'` prefix.
         * 
         * @return builder
         * 
         */
        public Builder noHexPrefix(@Nullable Output<Boolean> noHexPrefix) {
            $.noHexPrefix = noHexPrefix;
            return this;
        }

        /**
         * @param noHexPrefix Set this optional parameter to true to avoid adding a '0x' prefix to raw data in hexadecimal format. For example, by default, AWS DMS adds a '0x' prefix to the LOB column type in hexadecimal format moving from an Oracle source to a Kafka target. Use the `no_hex_prefix` endpoint setting to enable migration of RAW data type columns without adding the `'0x'` prefix.
         * 
         * @return builder
         * 
         */
        public Builder noHexPrefix(Boolean noHexPrefix) {
            return noHexPrefix(Output.of(noHexPrefix));
        }

        /**
         * @param partitionIncludeSchemaTable Prefixes schema and table names to partition values when the partition type is `primary-key-type`. Doing this increases data distribution among Kafka partitions. For example, suppose that a SysBench schema has thousands of tables and each table has only a limited range for a primary key. In this case, the same primary key is sent from thousands of tables to the same partition, which causes throttling. Default is `false`.
         * 
         * @return builder
         * 
         */
        public Builder partitionIncludeSchemaTable(@Nullable Output<Boolean> partitionIncludeSchemaTable) {
            $.partitionIncludeSchemaTable = partitionIncludeSchemaTable;
            return this;
        }

        /**
         * @param partitionIncludeSchemaTable Prefixes schema and table names to partition values when the partition type is `primary-key-type`. Doing this increases data distribution among Kafka partitions. For example, suppose that a SysBench schema has thousands of tables and each table has only a limited range for a primary key. In this case, the same primary key is sent from thousands of tables to the same partition, which causes throttling. Default is `false`.
         * 
         * @return builder
         * 
         */
        public Builder partitionIncludeSchemaTable(Boolean partitionIncludeSchemaTable) {
            return partitionIncludeSchemaTable(Output.of(partitionIncludeSchemaTable));
        }

        /**
         * @param saslPassword Secure password you created when you first set up your MSK cluster to validate a client identity and make an encrypted connection between server and client using SASL-SSL authentication.
         * 
         * @return builder
         * 
         */
        public Builder saslPassword(@Nullable Output<String> saslPassword) {
            $.saslPassword = saslPassword;
            return this;
        }

        /**
         * @param saslPassword Secure password you created when you first set up your MSK cluster to validate a client identity and make an encrypted connection between server and client using SASL-SSL authentication.
         * 
         * @return builder
         * 
         */
        public Builder saslPassword(String saslPassword) {
            return saslPassword(Output.of(saslPassword));
        }

        /**
         * @param saslUsername Secure user name you created when you first set up your MSK cluster to validate a client identity and make an encrypted connection between server and client using SASL-SSL authentication.
         * 
         * @return builder
         * 
         */
        public Builder saslUsername(@Nullable Output<String> saslUsername) {
            $.saslUsername = saslUsername;
            return this;
        }

        /**
         * @param saslUsername Secure user name you created when you first set up your MSK cluster to validate a client identity and make an encrypted connection between server and client using SASL-SSL authentication.
         * 
         * @return builder
         * 
         */
        public Builder saslUsername(String saslUsername) {
            return saslUsername(Output.of(saslUsername));
        }

        /**
         * @param securityProtocol Set secure connection to a Kafka target endpoint using Transport Layer Security (TLS). Options include `ssl-encryption`, `ssl-authentication`, and `sasl-ssl`. `sasl-ssl` requires `sasl_username` and `sasl_password`.
         * 
         * @return builder
         * 
         */
        public Builder securityProtocol(@Nullable Output<String> securityProtocol) {
            $.securityProtocol = securityProtocol;
            return this;
        }

        /**
         * @param securityProtocol Set secure connection to a Kafka target endpoint using Transport Layer Security (TLS). Options include `ssl-encryption`, `ssl-authentication`, and `sasl-ssl`. `sasl-ssl` requires `sasl_username` and `sasl_password`.
         * 
         * @return builder
         * 
         */
        public Builder securityProtocol(String securityProtocol) {
            return securityProtocol(Output.of(securityProtocol));
        }

        /**
         * @param sslCaCertificateArn ARN for the private certificate authority (CA) cert that AWS DMS uses to securely connect to your Kafka target endpoint.
         * 
         * @return builder
         * 
         */
        public Builder sslCaCertificateArn(@Nullable Output<String> sslCaCertificateArn) {
            $.sslCaCertificateArn = sslCaCertificateArn;
            return this;
        }

        /**
         * @param sslCaCertificateArn ARN for the private certificate authority (CA) cert that AWS DMS uses to securely connect to your Kafka target endpoint.
         * 
         * @return builder
         * 
         */
        public Builder sslCaCertificateArn(String sslCaCertificateArn) {
            return sslCaCertificateArn(Output.of(sslCaCertificateArn));
        }

        /**
         * @param sslClientCertificateArn ARN of the client certificate used to securely connect to a Kafka target endpoint.
         * 
         * @return builder
         * 
         */
        public Builder sslClientCertificateArn(@Nullable Output<String> sslClientCertificateArn) {
            $.sslClientCertificateArn = sslClientCertificateArn;
            return this;
        }

        /**
         * @param sslClientCertificateArn ARN of the client certificate used to securely connect to a Kafka target endpoint.
         * 
         * @return builder
         * 
         */
        public Builder sslClientCertificateArn(String sslClientCertificateArn) {
            return sslClientCertificateArn(Output.of(sslClientCertificateArn));
        }

        /**
         * @param sslClientKeyArn ARN for the client private key used to securely connect to a Kafka target endpoint.
         * 
         * @return builder
         * 
         */
        public Builder sslClientKeyArn(@Nullable Output<String> sslClientKeyArn) {
            $.sslClientKeyArn = sslClientKeyArn;
            return this;
        }

        /**
         * @param sslClientKeyArn ARN for the client private key used to securely connect to a Kafka target endpoint.
         * 
         * @return builder
         * 
         */
        public Builder sslClientKeyArn(String sslClientKeyArn) {
            return sslClientKeyArn(Output.of(sslClientKeyArn));
        }

        /**
         * @param sslClientKeyPassword Password for the client private key used to securely connect to a Kafka target endpoint.
         * 
         * @return builder
         * 
         */
        public Builder sslClientKeyPassword(@Nullable Output<String> sslClientKeyPassword) {
            $.sslClientKeyPassword = sslClientKeyPassword;
            return this;
        }

        /**
         * @param sslClientKeyPassword Password for the client private key used to securely connect to a Kafka target endpoint.
         * 
         * @return builder
         * 
         */
        public Builder sslClientKeyPassword(String sslClientKeyPassword) {
            return sslClientKeyPassword(Output.of(sslClientKeyPassword));
        }

        /**
         * @param topic Kafka topic for migration. Default is `kafka-default-topic`.
         * 
         * @return builder
         * 
         */
        public Builder topic(@Nullable Output<String> topic) {
            $.topic = topic;
            return this;
        }

        /**
         * @param topic Kafka topic for migration. Default is `kafka-default-topic`.
         * 
         * @return builder
         * 
         */
        public Builder topic(String topic) {
            return topic(Output.of(topic));
        }

        public EndpointKafkaSettingsArgs build() {
            if ($.broker == null) {
                throw new MissingRequiredPropertyException("EndpointKafkaSettingsArgs", "broker");
            }
            return $;
        }
    }

}
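For reference, below is a minimal usage sketch of the generated builder. The broker address, topic name, and SASL credentials are placeholder values, and the surrounding resource wiring (for example, passing the settings object to a com.pulumi.aws.dms.Endpoint resource) is assumed rather than shown.

import com.pulumi.aws.dms.inputs.EndpointKafkaSettingsArgs;

public final class KafkaSettingsExample {
    public static void main(String[] args) {
        // broker is the only required property; build() throws
        // MissingRequiredPropertyException when it is missing.
        EndpointKafkaSettingsArgs kafkaSettings = EndpointKafkaSettingsArgs.builder()
                .broker("b-1.example-cluster.kafka.us-east-1.amazonaws.com:9092") // placeholder broker endpoint
                .topic("dms-replication-topic")     // placeholder topic; default is kafka-default-topic
                .messageFormat("JSON_UNFORMATTED")
                .securityProtocol("sasl-ssl")       // sasl-ssl requires saslUsername and saslPassword
                .saslUsername("dms-user")           // placeholder credentials
                .saslPassword("example-password")
                .includeTransactionDetails(true)
                .build();
    }
}

The scalar overloads used above wrap their arguments with Output.of(...), so plain literals and Output-typed values can be mixed freely when building the settings.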



