/*
 * NOTE(review): The following header is residue from a Maven repository download page,
 * not part of the generated source. It has been commented out so the file remains
 * valid Kotlin (a @file: annotation may only be preceded by comments/whitespace).
 *
 * All Downloads are FREE. Search and download functionalities are using the official Maven repository.
 *
 * com.pulumi.gcp.dataloss.kotlin.PreventionJobTriggerArgs.kt Maven / Gradle / Ivy
 *
 * Go to download
 *
 * Build cloud applications and infrastructure by combining the safety and reliability of
 * infrastructure as code with the power of the Kotlin programming language.
 *
 * There is a newer version: 8.10.0.0
 * Show newest version
 */
@file:Suppress("NAME_SHADOWING", "DEPRECATION")

package com.pulumi.gcp.dataloss.kotlin

import com.pulumi.core.Output
import com.pulumi.core.Output.of
import com.pulumi.gcp.dataloss.PreventionJobTriggerArgs.builder
import com.pulumi.gcp.dataloss.kotlin.inputs.PreventionJobTriggerInspectJobArgs
import com.pulumi.gcp.dataloss.kotlin.inputs.PreventionJobTriggerInspectJobArgsBuilder
import com.pulumi.gcp.dataloss.kotlin.inputs.PreventionJobTriggerTriggerArgs
import com.pulumi.gcp.dataloss.kotlin.inputs.PreventionJobTriggerTriggerArgsBuilder
import com.pulumi.kotlin.ConvertibleToJava
import com.pulumi.kotlin.PulumiTagMarker
import com.pulumi.kotlin.applySuspend
import kotlin.String
import kotlin.Suppress
import kotlin.Unit
import kotlin.collections.List
import kotlin.jvm.JvmName

/**
 * A job trigger configuration.
 * To get more information about JobTrigger, see:
 * * [API documentation](https://cloud.google.com/dlp/docs/reference/rest/v2/projects.jobTriggers)
 * * How-to Guides
 *     * [Official Documentation](https://cloud.google.com/dlp/docs/creating-job-triggers)
 * ## Example Usage
 * ### Dlp Job Trigger Basic
 * 
 * ```typescript
 * import * as pulumi from "@pulumi/pulumi";
 * import * as gcp from "@pulumi/gcp";
 * const basic = new gcp.dataloss.PreventionJobTrigger("basic", {
 *     parent: "projects/my-project-name",
 *     description: "Description",
 *     displayName: "Displayname",
 *     triggers: [{
 *         schedule: {
 *             recurrencePeriodDuration: "86400s",
 *         },
 *     }],
 *     inspectJob: {
 *         inspectTemplateName: "fake",
 *         actions: [{
 *             saveFindings: {
 *                 outputConfig: {
 *                     table: {
 *                         projectId: "project",
 *                         datasetId: "dataset",
 *                     },
 *                 },
 *             },
 *         }],
 *         storageConfig: {
 *             cloudStorageOptions: {
 *                 fileSet: {
 *                     url: "gs://mybucket/directory/",
 *                 },
 *             },
 *         },
 *     },
 * });
 * ```
 * ```python
 * import pulumi
 * import pulumi_gcp as gcp
 * basic = gcp.dataloss.PreventionJobTrigger("basic",
 *     parent="projects/my-project-name",
 *     description="Description",
 *     display_name="Displayname",
 *     triggers=[gcp.dataloss.PreventionJobTriggerTriggerArgs(
 *         schedule=gcp.dataloss.PreventionJobTriggerTriggerScheduleArgs(
 *             recurrence_period_duration="86400s",
 *         ),
 *     )],
 *     inspect_job=gcp.dataloss.PreventionJobTriggerInspectJobArgs(
 *         inspect_template_name="fake",
 *         actions=[gcp.dataloss.PreventionJobTriggerInspectJobActionArgs(
 *             save_findings=gcp.dataloss.PreventionJobTriggerInspectJobActionSaveFindingsArgs(
 *                 output_config=gcp.dataloss.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs(
 *                     table=gcp.dataloss.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs(
 *                         project_id="project",
 *                         dataset_id="dataset",
 *                     ),
 *                 ),
 *             ),
 *         )],
 *         storage_config=gcp.dataloss.PreventionJobTriggerInspectJobStorageConfigArgs(
 *             cloud_storage_options=gcp.dataloss.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs(
 *                 file_set=gcp.dataloss.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs(
 *                     url="gs://mybucket/directory/",
 *                 ),
 *             ),
 *         ),
 *     ))
 * ```
 * ```csharp
 * using System.Collections.Generic;
 * using System.Linq;
 * using Pulumi;
 * using Gcp = Pulumi.Gcp;
 * return await Deployment.RunAsync(() =>
 * {
 *     var basic = new Gcp.DataLoss.PreventionJobTrigger("basic", new()
 *     {
 *         Parent = "projects/my-project-name",
 *         Description = "Description",
 *         DisplayName = "Displayname",
 *         Triggers = new[]
 *         {
 *             new Gcp.DataLoss.Inputs.PreventionJobTriggerTriggerArgs
 *             {
 *                 Schedule = new Gcp.DataLoss.Inputs.PreventionJobTriggerTriggerScheduleArgs
 *                 {
 *                     RecurrencePeriodDuration = "86400s",
 *                 },
 *             },
 *         },
 *         InspectJob = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobArgs
 *         {
 *             InspectTemplateName = "fake",
 *             Actions = new[]
 *             {
 *                 new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionArgs
 *                 {
 *                     SaveFindings = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionSaveFindingsArgs
 *                     {
 *                         OutputConfig = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs
 *                         {
 *                             Table = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs
 *                             {
 *                                 ProjectId = "project",
 *                                 DatasetId = "dataset",
 *                             },
 *                         },
 *                     },
 *                 },
 *             },
 *             StorageConfig = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobStorageConfigArgs
 *             {
 *                 CloudStorageOptions = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs
 *                 {
 *                     FileSet = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs
 *                     {
 *                         Url = "gs://mybucket/directory/",
 *                     },
 *                 },
 *             },
 *         },
 *     });
 * });
 * ```
 * ```go
 * package main
 * import (
 * 	"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/dataloss"
 * 	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
 * )
 * func main() {
 * 	pulumi.Run(func(ctx *pulumi.Context) error {
 * 		_, err := dataloss.NewPreventionJobTrigger(ctx, "basic", &dataloss.PreventionJobTriggerArgs{
 * 			Parent:      pulumi.String("projects/my-project-name"),
 * 			Description: pulumi.String("Description"),
 * 			DisplayName: pulumi.String("Displayname"),
 * 			Triggers: dataloss.PreventionJobTriggerTriggerArray{
 * 				&dataloss.PreventionJobTriggerTriggerArgs{
 * 					Schedule: &dataloss.PreventionJobTriggerTriggerScheduleArgs{
 * 						RecurrencePeriodDuration: pulumi.String("86400s"),
 * 					},
 * 				},
 * 			},
 * 			InspectJob: &dataloss.PreventionJobTriggerInspectJobArgs{
 * 				InspectTemplateName: pulumi.String("fake"),
 * 				Actions: dataloss.PreventionJobTriggerInspectJobActionArray{
 * 					&dataloss.PreventionJobTriggerInspectJobActionArgs{
 * 						SaveFindings: &dataloss.PreventionJobTriggerInspectJobActionSaveFindingsArgs{
 * 							OutputConfig: &dataloss.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs{
 * 								Table: &dataloss.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs{
 * 									ProjectId: pulumi.String("project"),
 * 									DatasetId: pulumi.String("dataset"),
 * 								},
 * 							},
 * 						},
 * 					},
 * 				},
 * 				StorageConfig: &dataloss.PreventionJobTriggerInspectJobStorageConfigArgs{
 * 					CloudStorageOptions: &dataloss.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs{
 * 						FileSet: &dataloss.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs{
 * 							Url: pulumi.String("gs://mybucket/directory/"),
 * 						},
 * 					},
 * 				},
 * 			},
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		return nil
 * 	})
 * }
 * ```
 * ```java
 * package generated_program;
 * import com.pulumi.Context;
 * import com.pulumi.Pulumi;
 * import com.pulumi.core.Output;
 * import com.pulumi.gcp.dataloss.PreventionJobTrigger;
 * import com.pulumi.gcp.dataloss.PreventionJobTriggerArgs;
 * import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerTriggerArgs;
 * import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerTriggerScheduleArgs;
 * import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobArgs;
 * import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigArgs;
 * import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs;
 * import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs;
 * import java.util.List;
 * import java.util.ArrayList;
 * import java.util.Map;
 * import java.io.File;
 * import java.nio.file.Files;
 * import java.nio.file.Paths;
 * public class App {
 *     public static void main(String[] args) {
 *         Pulumi.run(App::stack);
 *     }
 *     public static void stack(Context ctx) {
 *         var basic = new PreventionJobTrigger("basic", PreventionJobTriggerArgs.builder()
 *             .parent("projects/my-project-name")
 *             .description("Description")
 *             .displayName("Displayname")
 *             .triggers(PreventionJobTriggerTriggerArgs.builder()
 *                 .schedule(PreventionJobTriggerTriggerScheduleArgs.builder()
 *                     .recurrencePeriodDuration("86400s")
 *                     .build())
 *                 .build())
 *             .inspectJob(PreventionJobTriggerInspectJobArgs.builder()
 *                 .inspectTemplateName("fake")
 *                 .actions(PreventionJobTriggerInspectJobActionArgs.builder()
 *                     .saveFindings(PreventionJobTriggerInspectJobActionSaveFindingsArgs.builder()
 *                         .outputConfig(PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs.builder()
 *                             .table(PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs.builder()
 *                                 .projectId("project")
 *                                 .datasetId("dataset")
 *                                 .build())
 *                             .build())
 *                         .build())
 *                     .build())
 *                 .storageConfig(PreventionJobTriggerInspectJobStorageConfigArgs.builder()
 *                     .cloudStorageOptions(PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs.builder()
 *                         .fileSet(PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs.builder()
 *                             .url("gs://mybucket/directory/")
 *                             .build())
 *                         .build())
 *                     .build())
 *                 .build())
 *             .build());
 *     }
 * }
 * ```
 * ```yaml
 * resources:
 *   basic:
 *     type: gcp:dataloss:PreventionJobTrigger
 *     properties:
 *       parent: projects/my-project-name
 *       description: Description
 *       displayName: Displayname
 *       triggers:
 *         - schedule:
 *             recurrencePeriodDuration: 86400s
 *       inspectJob:
 *         inspectTemplateName: fake
 *         actions:
 *           - saveFindings:
 *               outputConfig:
 *                 table:
 *                   projectId: project
 *                   datasetId: dataset
 *         storageConfig:
 *           cloudStorageOptions:
 *             fileSet:
 *               url: gs://mybucket/directory/
 * ```
 * 
 * ### Dlp Job Trigger Bigquery Row Limit
 * 
 * ```typescript
 * import * as pulumi from "@pulumi/pulumi";
 * import * as gcp from "@pulumi/gcp";
 * const bigqueryRowLimit = new gcp.dataloss.PreventionJobTrigger("bigquery_row_limit", {
 *     parent: "projects/my-project-name",
 *     description: "Description",
 *     displayName: "Displayname",
 *     triggers: [{
 *         schedule: {
 *             recurrencePeriodDuration: "86400s",
 *         },
 *     }],
 *     inspectJob: {
 *         inspectTemplateName: "fake",
 *         actions: [{
 *             saveFindings: {
 *                 outputConfig: {
 *                     table: {
 *                         projectId: "project",
 *                         datasetId: "dataset",
 *                     },
 *                 },
 *             },
 *         }],
 *         storageConfig: {
 *             bigQueryOptions: {
 *                 tableReference: {
 *                     projectId: "project",
 *                     datasetId: "dataset",
 *                     tableId: "table_to_scan",
 *                 },
 *                 rowsLimit: 1000,
 *                 sampleMethod: "RANDOM_START",
 *             },
 *         },
 *     },
 * });
 * ```
 * ```python
 * import pulumi
 * import pulumi_gcp as gcp
 * bigquery_row_limit = gcp.dataloss.PreventionJobTrigger("bigquery_row_limit",
 *     parent="projects/my-project-name",
 *     description="Description",
 *     display_name="Displayname",
 *     triggers=[gcp.dataloss.PreventionJobTriggerTriggerArgs(
 *         schedule=gcp.dataloss.PreventionJobTriggerTriggerScheduleArgs(
 *             recurrence_period_duration="86400s",
 *         ),
 *     )],
 *     inspect_job=gcp.dataloss.PreventionJobTriggerInspectJobArgs(
 *         inspect_template_name="fake",
 *         actions=[gcp.dataloss.PreventionJobTriggerInspectJobActionArgs(
 *             save_findings=gcp.dataloss.PreventionJobTriggerInspectJobActionSaveFindingsArgs(
 *                 output_config=gcp.dataloss.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs(
 *                     table=gcp.dataloss.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs(
 *                         project_id="project",
 *                         dataset_id="dataset",
 *                     ),
 *                 ),
 *             ),
 *         )],
 *         storage_config=gcp.dataloss.PreventionJobTriggerInspectJobStorageConfigArgs(
 *             big_query_options=gcp.dataloss.PreventionJobTriggerInspectJobStorageConfigBigQueryOptionsArgs(
 *                 table_reference=gcp.dataloss.PreventionJobTriggerInspectJobStorageConfigBigQueryOptionsTableReferenceArgs(
 *                     project_id="project",
 *                     dataset_id="dataset",
 *                     table_id="table_to_scan",
 *                 ),
 *                 rows_limit=1000,
 *                 sample_method="RANDOM_START",
 *             ),
 *         ),
 *     ))
 * ```
 * ```csharp
 * using System.Collections.Generic;
 * using System.Linq;
 * using Pulumi;
 * using Gcp = Pulumi.Gcp;
 * return await Deployment.RunAsync(() =>
 * {
 *     var bigqueryRowLimit = new Gcp.DataLoss.PreventionJobTrigger("bigquery_row_limit", new()
 *     {
 *         Parent = "projects/my-project-name",
 *         Description = "Description",
 *         DisplayName = "Displayname",
 *         Triggers = new[]
 *         {
 *             new Gcp.DataLoss.Inputs.PreventionJobTriggerTriggerArgs
 *             {
 *                 Schedule = new Gcp.DataLoss.Inputs.PreventionJobTriggerTriggerScheduleArgs
 *                 {
 *                     RecurrencePeriodDuration = "86400s",
 *                 },
 *             },
 *         },
 *         InspectJob = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobArgs
 *         {
 *             InspectTemplateName = "fake",
 *             Actions = new[]
 *             {
 *                 new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionArgs
 *                 {
 *                     SaveFindings = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionSaveFindingsArgs
 *                     {
 *                         OutputConfig = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs
 *                         {
 *                             Table = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs
 *                             {
 *                                 ProjectId = "project",
 *                                 DatasetId = "dataset",
 *                             },
 *                         },
 *                     },
 *                 },
 *             },
 *             StorageConfig = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobStorageConfigArgs
 *             {
 *                 BigQueryOptions = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobStorageConfigBigQueryOptionsArgs
 *                 {
 *                     TableReference = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobStorageConfigBigQueryOptionsTableReferenceArgs
 *                     {
 *                         ProjectId = "project",
 *                         DatasetId = "dataset",
 *                         TableId = "table_to_scan",
 *                     },
 *                     RowsLimit = 1000,
 *                     SampleMethod = "RANDOM_START",
 *                 },
 *             },
 *         },
 *     });
 * });
 * ```
 * ```go
 * package main
 * import (
 * 	"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/dataloss"
 * 	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
 * )
 * func main() {
 * 	pulumi.Run(func(ctx *pulumi.Context) error {
 * 		_, err := dataloss.NewPreventionJobTrigger(ctx, "bigquery_row_limit", &dataloss.PreventionJobTriggerArgs{
 * 			Parent:      pulumi.String("projects/my-project-name"),
 * 			Description: pulumi.String("Description"),
 * 			DisplayName: pulumi.String("Displayname"),
 * 			Triggers: dataloss.PreventionJobTriggerTriggerArray{
 * 				&dataloss.PreventionJobTriggerTriggerArgs{
 * 					Schedule: &dataloss.PreventionJobTriggerTriggerScheduleArgs{
 * 						RecurrencePeriodDuration: pulumi.String("86400s"),
 * 					},
 * 				},
 * 			},
 * 			InspectJob: &dataloss.PreventionJobTriggerInspectJobArgs{
 * 				InspectTemplateName: pulumi.String("fake"),
 * 				Actions: dataloss.PreventionJobTriggerInspectJobActionArray{
 * 					&dataloss.PreventionJobTriggerInspectJobActionArgs{
 * 						SaveFindings: &dataloss.PreventionJobTriggerInspectJobActionSaveFindingsArgs{
 * 							OutputConfig: &dataloss.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs{
 * 								Table: &dataloss.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs{
 * 									ProjectId: pulumi.String("project"),
 * 									DatasetId: pulumi.String("dataset"),
 * 								},
 * 							},
 * 						},
 * 					},
 * 				},
 * 				StorageConfig: &dataloss.PreventionJobTriggerInspectJobStorageConfigArgs{
 * 					BigQueryOptions: &dataloss.PreventionJobTriggerInspectJobStorageConfigBigQueryOptionsArgs{
 * 						TableReference: &dataloss.PreventionJobTriggerInspectJobStorageConfigBigQueryOptionsTableReferenceArgs{
 * 							ProjectId: pulumi.String("project"),
 * 							DatasetId: pulumi.String("dataset"),
 * 							TableId:   pulumi.String("table_to_scan"),
 * 						},
 * 						RowsLimit:    pulumi.Int(1000),
 * 						SampleMethod: pulumi.String("RANDOM_START"),
 * 					},
 * 				},
 * 			},
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		return nil
 * 	})
 * }
 * ```
 * ```java
 * package generated_program;
 * import com.pulumi.Context;
 * import com.pulumi.Pulumi;
 * import com.pulumi.core.Output;
 * import com.pulumi.gcp.dataloss.PreventionJobTrigger;
 * import com.pulumi.gcp.dataloss.PreventionJobTriggerArgs;
 * import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerTriggerArgs;
 * import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerTriggerScheduleArgs;
 * import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobArgs;
 * import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigArgs;
 * import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigBigQueryOptionsArgs;
 * import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigBigQueryOptionsTableReferenceArgs;
 * import java.util.List;
 * import java.util.ArrayList;
 * import java.util.Map;
 * import java.io.File;
 * import java.nio.file.Files;
 * import java.nio.file.Paths;
 * public class App {
 *     public static void main(String[] args) {
 *         Pulumi.run(App::stack);
 *     }
 *     public static void stack(Context ctx) {
 *         var bigqueryRowLimit = new PreventionJobTrigger("bigqueryRowLimit", PreventionJobTriggerArgs.builder()
 *             .parent("projects/my-project-name")
 *             .description("Description")
 *             .displayName("Displayname")
 *             .triggers(PreventionJobTriggerTriggerArgs.builder()
 *                 .schedule(PreventionJobTriggerTriggerScheduleArgs.builder()
 *                     .recurrencePeriodDuration("86400s")
 *                     .build())
 *                 .build())
 *             .inspectJob(PreventionJobTriggerInspectJobArgs.builder()
 *                 .inspectTemplateName("fake")
 *                 .actions(PreventionJobTriggerInspectJobActionArgs.builder()
 *                     .saveFindings(PreventionJobTriggerInspectJobActionSaveFindingsArgs.builder()
 *                         .outputConfig(PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs.builder()
 *                             .table(PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs.builder()
 *                                 .projectId("project")
 *                                 .datasetId("dataset")
 *                                 .build())
 *                             .build())
 *                         .build())
 *                     .build())
 *                 .storageConfig(PreventionJobTriggerInspectJobStorageConfigArgs.builder()
 *                     .bigQueryOptions(PreventionJobTriggerInspectJobStorageConfigBigQueryOptionsArgs.builder()
 *                         .tableReference(PreventionJobTriggerInspectJobStorageConfigBigQueryOptionsTableReferenceArgs.builder()
 *                             .projectId("project")
 *                             .datasetId("dataset")
 *                             .tableId("table_to_scan")
 *                             .build())
 *                         .rowsLimit(1000)
 *                         .sampleMethod("RANDOM_START")
 *                         .build())
 *                     .build())
 *                 .build())
 *             .build());
 *     }
 * }
 * ```
 * ```yaml
 * resources:
 *   bigqueryRowLimit:
 *     type: gcp:dataloss:PreventionJobTrigger
 *     name: bigquery_row_limit
 *     properties:
 *       parent: projects/my-project-name
 *       description: Description
 *       displayName: Displayname
 *       triggers:
 *         - schedule:
 *             recurrencePeriodDuration: 86400s
 *       inspectJob:
 *         inspectTemplateName: fake
 *         actions:
 *           - saveFindings:
 *               outputConfig:
 *                 table:
 *                   projectId: project
 *                   datasetId: dataset
 *         storageConfig:
 *           bigQueryOptions:
 *             tableReference:
 *               projectId: project
 *               datasetId: dataset
 *               tableId: table_to_scan
 *             rowsLimit: 1000
 *             sampleMethod: RANDOM_START
 * ```
 * 
 * ### Dlp Job Trigger Bigquery Row Limit Percentage
 * 
 * ```typescript
 * import * as pulumi from "@pulumi/pulumi";
 * import * as gcp from "@pulumi/gcp";
 * const bigqueryRowLimitPercentage = new gcp.dataloss.PreventionJobTrigger("bigquery_row_limit_percentage", {
 *     parent: "projects/my-project-name",
 *     description: "Description",
 *     displayName: "Displayname",
 *     triggers: [{
 *         schedule: {
 *             recurrencePeriodDuration: "86400s",
 *         },
 *     }],
 *     inspectJob: {
 *         inspectTemplateName: "fake",
 *         actions: [{
 *             saveFindings: {
 *                 outputConfig: {
 *                     table: {
 *                         projectId: "project",
 *                         datasetId: "dataset",
 *                     },
 *                 },
 *             },
 *         }],
 *         storageConfig: {
 *             bigQueryOptions: {
 *                 tableReference: {
 *                     projectId: "project",
 *                     datasetId: "dataset",
 *                     tableId: "table_to_scan",
 *                 },
 *                 rowsLimitPercent: 50,
 *                 sampleMethod: "RANDOM_START",
 *             },
 *         },
 *     },
 * });
 * ```
 * ```python
 * import pulumi
 * import pulumi_gcp as gcp
 * bigquery_row_limit_percentage = gcp.dataloss.PreventionJobTrigger("bigquery_row_limit_percentage",
 *     parent="projects/my-project-name",
 *     description="Description",
 *     display_name="Displayname",
 *     triggers=[gcp.dataloss.PreventionJobTriggerTriggerArgs(
 *         schedule=gcp.dataloss.PreventionJobTriggerTriggerScheduleArgs(
 *             recurrence_period_duration="86400s",
 *         ),
 *     )],
 *     inspect_job=gcp.dataloss.PreventionJobTriggerInspectJobArgs(
 *         inspect_template_name="fake",
 *         actions=[gcp.dataloss.PreventionJobTriggerInspectJobActionArgs(
 *             save_findings=gcp.dataloss.PreventionJobTriggerInspectJobActionSaveFindingsArgs(
 *                 output_config=gcp.dataloss.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs(
 *                     table=gcp.dataloss.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs(
 *                         project_id="project",
 *                         dataset_id="dataset",
 *                     ),
 *                 ),
 *             ),
 *         )],
 *         storage_config=gcp.dataloss.PreventionJobTriggerInspectJobStorageConfigArgs(
 *             big_query_options=gcp.dataloss.PreventionJobTriggerInspectJobStorageConfigBigQueryOptionsArgs(
 *                 table_reference=gcp.dataloss.PreventionJobTriggerInspectJobStorageConfigBigQueryOptionsTableReferenceArgs(
 *                     project_id="project",
 *                     dataset_id="dataset",
 *                     table_id="table_to_scan",
 *                 ),
 *                 rows_limit_percent=50,
 *                 sample_method="RANDOM_START",
 *             ),
 *         ),
 *     ))
 * ```
 * ```csharp
 * using System.Collections.Generic;
 * using System.Linq;
 * using Pulumi;
 * using Gcp = Pulumi.Gcp;
 * return await Deployment.RunAsync(() =>
 * {
 *     var bigqueryRowLimitPercentage = new Gcp.DataLoss.PreventionJobTrigger("bigquery_row_limit_percentage", new()
 *     {
 *         Parent = "projects/my-project-name",
 *         Description = "Description",
 *         DisplayName = "Displayname",
 *         Triggers = new[]
 *         {
 *             new Gcp.DataLoss.Inputs.PreventionJobTriggerTriggerArgs
 *             {
 *                 Schedule = new Gcp.DataLoss.Inputs.PreventionJobTriggerTriggerScheduleArgs
 *                 {
 *                     RecurrencePeriodDuration = "86400s",
 *                 },
 *             },
 *         },
 *         InspectJob = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobArgs
 *         {
 *             InspectTemplateName = "fake",
 *             Actions = new[]
 *             {
 *                 new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionArgs
 *                 {
 *                     SaveFindings = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionSaveFindingsArgs
 *                     {
 *                         OutputConfig = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs
 *                         {
 *                             Table = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs
 *                             {
 *                                 ProjectId = "project",
 *                                 DatasetId = "dataset",
 *                             },
 *                         },
 *                     },
 *                 },
 *             },
 *             StorageConfig = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobStorageConfigArgs
 *             {
 *                 BigQueryOptions = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobStorageConfigBigQueryOptionsArgs
 *                 {
 *                     TableReference = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobStorageConfigBigQueryOptionsTableReferenceArgs
 *                     {
 *                         ProjectId = "project",
 *                         DatasetId = "dataset",
 *                         TableId = "table_to_scan",
 *                     },
 *                     RowsLimitPercent = 50,
 *                     SampleMethod = "RANDOM_START",
 *                 },
 *             },
 *         },
 *     });
 * });
 * ```
 * ```go
 * package main
 * import (
 * 	"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/dataloss"
 * 	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
 * )
 * func main() {
 * 	pulumi.Run(func(ctx *pulumi.Context) error {
 * 		_, err := dataloss.NewPreventionJobTrigger(ctx, "bigquery_row_limit_percentage", &dataloss.PreventionJobTriggerArgs{
 * 			Parent:      pulumi.String("projects/my-project-name"),
 * 			Description: pulumi.String("Description"),
 * 			DisplayName: pulumi.String("Displayname"),
 * 			Triggers: dataloss.PreventionJobTriggerTriggerArray{
 * 				&dataloss.PreventionJobTriggerTriggerArgs{
 * 					Schedule: &dataloss.PreventionJobTriggerTriggerScheduleArgs{
 * 						RecurrencePeriodDuration: pulumi.String("86400s"),
 * 					},
 * 				},
 * 			},
 * 			InspectJob: &dataloss.PreventionJobTriggerInspectJobArgs{
 * 				InspectTemplateName: pulumi.String("fake"),
 * 				Actions: dataloss.PreventionJobTriggerInspectJobActionArray{
 * 					&dataloss.PreventionJobTriggerInspectJobActionArgs{
 * 						SaveFindings: &dataloss.PreventionJobTriggerInspectJobActionSaveFindingsArgs{
 * 							OutputConfig: &dataloss.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs{
 * 								Table: &dataloss.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs{
 * 									ProjectId: pulumi.String("project"),
 * 									DatasetId: pulumi.String("dataset"),
 * 								},
 * 							},
 * 						},
 * 					},
 * 				},
 * 				StorageConfig: &dataloss.PreventionJobTriggerInspectJobStorageConfigArgs{
 * 					BigQueryOptions: &dataloss.PreventionJobTriggerInspectJobStorageConfigBigQueryOptionsArgs{
 * 						TableReference: &dataloss.PreventionJobTriggerInspectJobStorageConfigBigQueryOptionsTableReferenceArgs{
 * 							ProjectId: pulumi.String("project"),
 * 							DatasetId: pulumi.String("dataset"),
 * 							TableId:   pulumi.String("table_to_scan"),
 * 						},
 * 						RowsLimitPercent: pulumi.Int(50),
 * 						SampleMethod:     pulumi.String("RANDOM_START"),
 * 					},
 * 				},
 * 			},
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		return nil
 * 	})
 * }
 * ```
 * ```java
 * package generated_program;
 * import com.pulumi.Context;
 * import com.pulumi.Pulumi;
 * import com.pulumi.core.Output;
 * import com.pulumi.gcp.dataloss.PreventionJobTrigger;
 * import com.pulumi.gcp.dataloss.PreventionJobTriggerArgs;
 * import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerTriggerArgs;
 * import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerTriggerScheduleArgs;
 * import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobArgs;
 * import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigArgs;
 * import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigBigQueryOptionsArgs;
 * import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigBigQueryOptionsTableReferenceArgs;
 * import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobActionArgs;
 * import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobActionSaveFindingsArgs;
 * import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs;
 * import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs;
 * import java.util.List;
 * import java.util.ArrayList;
 * import java.util.Map;
 * import java.io.File;
 * import java.nio.file.Files;
 * import java.nio.file.Paths;
 * public class App {
 *     public static void main(String[] args) {
 *         Pulumi.run(App::stack);
 *     }
 *     public static void stack(Context ctx) {
 *         var bigqueryRowLimitPercentage = new PreventionJobTrigger("bigqueryRowLimitPercentage", PreventionJobTriggerArgs.builder()
 *             .parent("projects/my-project-name")
 *             .description("Description")
 *             .displayName("Displayname")
 *             .triggers(PreventionJobTriggerTriggerArgs.builder()
 *                 .schedule(PreventionJobTriggerTriggerScheduleArgs.builder()
 *                     .recurrencePeriodDuration("86400s")
 *                     .build())
 *                 .build())
 *             .inspectJob(PreventionJobTriggerInspectJobArgs.builder()
 *                 .inspectTemplateName("fake")
 *                 .actions(PreventionJobTriggerInspectJobActionArgs.builder()
 *                     .saveFindings(PreventionJobTriggerInspectJobActionSaveFindingsArgs.builder()
 *                         .outputConfig(PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs.builder()
 *                             .table(PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs.builder()
 *                                 .projectId("project")
 *                                 .datasetId("dataset")
 *                                 .build())
 *                             .build())
 *                         .build())
 *                     .build())
 *                 .storageConfig(PreventionJobTriggerInspectJobStorageConfigArgs.builder()
 *                     .bigQueryOptions(PreventionJobTriggerInspectJobStorageConfigBigQueryOptionsArgs.builder()
 *                         .tableReference(PreventionJobTriggerInspectJobStorageConfigBigQueryOptionsTableReferenceArgs.builder()
 *                             .projectId("project")
 *                             .datasetId("dataset")
 *                             .tableId("table_to_scan")
 *                             .build())
 *                         .rowsLimitPercent(50)
 *                         .sampleMethod("RANDOM_START")
 *                         .build())
 *                     .build())
 *                 .build())
 *             .build());
 *     }
 * }
 * ```
 * ```yaml
 * resources:
 *   bigqueryRowLimitPercentage:
 *     type: gcp:dataloss:PreventionJobTrigger
 *     name: bigquery_row_limit_percentage
 *     properties:
 *       parent: projects/my-project-name
 *       description: Description
 *       displayName: Displayname
 *       triggers:
 *         - schedule:
 *             recurrencePeriodDuration: 86400s
 *       inspectJob:
 *         inspectTemplateName: fake
 *         actions:
 *           - saveFindings:
 *               outputConfig:
 *                 table:
 *                   projectId: project
 *                   datasetId: dataset
 *         storageConfig:
 *           bigQueryOptions:
 *             tableReference:
 *               projectId: project
 *               datasetId: dataset
 *               tableId: table_to_scan
 *             rowsLimitPercent: 50
 *             sampleMethod: RANDOM_START
 * ```
 * 
 * ### Dlp Job Trigger Job Notification Emails
 * 
 * ```typescript
 * import * as pulumi from "@pulumi/pulumi";
 * import * as gcp from "@pulumi/gcp";
 * const jobNotificationEmails = new gcp.dataloss.PreventionJobTrigger("job_notification_emails", {
 *     parent: "projects/my-project-name",
 *     description: "Description for the job_trigger created by terraform",
 *     displayName: "TerraformDisplayName",
 *     triggers: [{
 *         schedule: {
 *             recurrencePeriodDuration: "86400s",
 *         },
 *     }],
 *     inspectJob: {
 *         inspectTemplateName: "sample-inspect-template",
 *         actions: [{
 *             jobNotificationEmails: {},
 *         }],
 *         storageConfig: {
 *             cloudStorageOptions: {
 *                 fileSet: {
 *                     url: "gs://mybucket/directory/",
 *                 },
 *             },
 *         },
 *     },
 * });
 * ```
 * ```python
 * import pulumi
 * import pulumi_gcp as gcp
 * job_notification_emails = gcp.dataloss.PreventionJobTrigger("job_notification_emails",
 *     parent="projects/my-project-name",
 *     description="Description for the job_trigger created by terraform",
 *     display_name="TerraformDisplayName",
 *     triggers=[gcp.dataloss.PreventionJobTriggerTriggerArgs(
 *         schedule=gcp.dataloss.PreventionJobTriggerTriggerScheduleArgs(
 *             recurrence_period_duration="86400s",
 *         ),
 *     )],
 *     inspect_job=gcp.dataloss.PreventionJobTriggerInspectJobArgs(
 *         inspect_template_name="sample-inspect-template",
 *         actions=[gcp.dataloss.PreventionJobTriggerInspectJobActionArgs(
 *             job_notification_emails=gcp.dataloss.PreventionJobTriggerInspectJobActionJobNotificationEmailsArgs(),
 *         )],
 *         storage_config=gcp.dataloss.PreventionJobTriggerInspectJobStorageConfigArgs(
 *             cloud_storage_options=gcp.dataloss.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs(
 *                 file_set=gcp.dataloss.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs(
 *                     url="gs://mybucket/directory/",
 *                 ),
 *             ),
 *         ),
 *     ))
 * ```
 * ```csharp
 * using System.Collections.Generic;
 * using System.Linq;
 * using Pulumi;
 * using Gcp = Pulumi.Gcp;
 * return await Deployment.RunAsync(() =>
 * {
 *     var jobNotificationEmails = new Gcp.DataLoss.PreventionJobTrigger("job_notification_emails", new()
 *     {
 *         Parent = "projects/my-project-name",
 *         Description = "Description for the job_trigger created by terraform",
 *         DisplayName = "TerraformDisplayName",
 *         Triggers = new[]
 *         {
 *             new Gcp.DataLoss.Inputs.PreventionJobTriggerTriggerArgs
 *             {
 *                 Schedule = new Gcp.DataLoss.Inputs.PreventionJobTriggerTriggerScheduleArgs
 *                 {
 *                     RecurrencePeriodDuration = "86400s",
 *                 },
 *             },
 *         },
 *         InspectJob = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobArgs
 *         {
 *             InspectTemplateName = "sample-inspect-template",
 *             Actions = new[]
 *             {
 *                 new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionArgs
 *                 {
 *                     JobNotificationEmails = null,
 *                 },
 *             },
 *             StorageConfig = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobStorageConfigArgs
 *             {
 *                 CloudStorageOptions = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs
 *                 {
 *                     FileSet = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs
 *                     {
 *                         Url = "gs://mybucket/directory/",
 *                     },
 *                 },
 *             },
 *         },
 *     });
 * });
 * ```
 * ```go
 * package main
 * import (
 * 	"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/dataloss"
 * 	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
 * )
 * func main() {
 * 	pulumi.Run(func(ctx *pulumi.Context) error {
 * 		_, err := dataloss.NewPreventionJobTrigger(ctx, "job_notification_emails", &dataloss.PreventionJobTriggerArgs{
 * 			Parent:      pulumi.String("projects/my-project-name"),
 * 			Description: pulumi.String("Description for the job_trigger created by terraform"),
 * 			DisplayName: pulumi.String("TerraformDisplayName"),
 * 			Triggers: dataloss.PreventionJobTriggerTriggerArray{
 * 				&dataloss.PreventionJobTriggerTriggerArgs{
 * 					Schedule: &dataloss.PreventionJobTriggerTriggerScheduleArgs{
 * 						RecurrencePeriodDuration: pulumi.String("86400s"),
 * 					},
 * 				},
 * 			},
 * 			InspectJob: &dataloss.PreventionJobTriggerInspectJobArgs{
 * 				InspectTemplateName: pulumi.String("sample-inspect-template"),
 * 				Actions: dataloss.PreventionJobTriggerInspectJobActionArray{
 * 					&dataloss.PreventionJobTriggerInspectJobActionArgs{
 * 						JobNotificationEmails: nil,
 * 					},
 * 				},
 * 				StorageConfig: &dataloss.PreventionJobTriggerInspectJobStorageConfigArgs{
 * 					CloudStorageOptions: &dataloss.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs{
 * 						FileSet: &dataloss.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs{
 * 							Url: pulumi.String("gs://mybucket/directory/"),
 * 						},
 * 					},
 * 				},
 * 			},
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		return nil
 * 	})
 * }
 * ```
 * ```java
 * package generated_program;
 * import com.pulumi.Context;
 * import com.pulumi.Pulumi;
 * import com.pulumi.core.Output;
 * import com.pulumi.gcp.dataloss.PreventionJobTrigger;
 * import com.pulumi.gcp.dataloss.PreventionJobTriggerArgs;
 * import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerTriggerArgs;
 * import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerTriggerScheduleArgs;
 * import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobArgs;
 * import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigArgs;
 * import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs;
 * import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs;
 * import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobActionArgs;
 * import java.util.List;
 * import java.util.ArrayList;
 * import java.util.Map;
 * import java.io.File;
 * import java.nio.file.Files;
 * import java.nio.file.Paths;
 * public class App {
 *     public static void main(String[] args) {
 *         Pulumi.run(App::stack);
 *     }
 *     public static void stack(Context ctx) {
 *         var jobNotificationEmails = new PreventionJobTrigger("jobNotificationEmails", PreventionJobTriggerArgs.builder()
 *             .parent("projects/my-project-name")
 *             .description("Description for the job_trigger created by terraform")
 *             .displayName("TerraformDisplayName")
 *             .triggers(PreventionJobTriggerTriggerArgs.builder()
 *                 .schedule(PreventionJobTriggerTriggerScheduleArgs.builder()
 *                     .recurrencePeriodDuration("86400s")
 *                     .build())
 *                 .build())
 *             .inspectJob(PreventionJobTriggerInspectJobArgs.builder()
 *                 .inspectTemplateName("sample-inspect-template")
 *                 .actions(PreventionJobTriggerInspectJobActionArgs.builder()
 *                     .jobNotificationEmails()
 *                     .build())
 *                 .storageConfig(PreventionJobTriggerInspectJobStorageConfigArgs.builder()
 *                     .cloudStorageOptions(PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs.builder()
 *                         .fileSet(PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs.builder()
 *                             .url("gs://mybucket/directory/")
 *                             .build())
 *                         .build())
 *                     .build())
 *                 .build())
 *             .build());
 *     }
 * }
 * ```
 * ```yaml
 * resources:
 *   jobNotificationEmails:
 *     type: gcp:dataloss:PreventionJobTrigger
 *     name: job_notification_emails
 *     properties:
 *       parent: projects/my-project-name
 *       description: Description for the job_trigger created by terraform
 *       displayName: TerraformDisplayName
 *       triggers:
 *         - schedule:
 *             recurrencePeriodDuration: 86400s
 *       inspectJob:
 *         inspectTemplateName: sample-inspect-template
 *         actions:
 *           - jobNotificationEmails: {}
 *         storageConfig:
 *           cloudStorageOptions:
 *             fileSet:
 *               url: gs://mybucket/directory/
 * ```
 * 
 * ### Dlp Job Trigger Deidentify
 * 
 * ```typescript
 * import * as pulumi from "@pulumi/pulumi";
 * import * as gcp from "@pulumi/gcp";
 * const _default = new gcp.bigquery.Dataset("default", {
 *     datasetId: "tf_test",
 *     friendlyName: "terraform-test",
 *     description: "Description for the dataset created by terraform",
 *     location: "US",
 *     defaultTableExpirationMs: 3600000,
 *     labels: {
 *         env: "default",
 *     },
 * });
 * const defaultTable = new gcp.bigquery.Table("default", {
 *     datasetId: _default.datasetId,
 *     tableId: "tf_test",
 *     deletionProtection: false,
 *     timePartitioning: {
 *         type: "DAY",
 *     },
 *     labels: {
 *         env: "default",
 *     },
 *     schema: `    [
 *     {
 *       "name": "quantity",
 *       "type": "NUMERIC",
 *       "mode": "NULLABLE",
 *       "description": "The quantity"
 *     },
 *     {
 *       "name": "name",
 *       "type": "STRING",
 *       "mode": "NULLABLE",
 *       "description": "Name of the object"
 *     }
 *     ]
 * `,
 * });
 * const deidentify = new gcp.dataloss.PreventionJobTrigger("deidentify", {
 *     parent: "projects/my-project-name",
 *     description: "Description for the job_trigger created by terraform",
 *     displayName: "TerraformDisplayName",
 *     triggers: [{
 *         schedule: {
 *             recurrencePeriodDuration: "86400s",
 *         },
 *     }],
 *     inspectJob: {
 *         inspectTemplateName: "sample-inspect-template",
 *         actions: [{
 *             deidentify: {
 *                 cloudStorageOutput: "gs://samplebucket/dir/",
 *                 fileTypesToTransforms: [
 *                     "CSV",
 *                     "TSV",
 *                 ],
 *                 transformationDetailsStorageConfig: {
 *                     table: {
 *                         projectId: "my-project-name",
 *                         datasetId: _default.datasetId,
 *                         tableId: defaultTable.tableId,
 *                     },
 *                 },
 *                 transformationConfig: {
 *                     deidentifyTemplate: "sample-deidentify-template",
 *                     imageRedactTemplate: "sample-image-redact-template",
 *                     structuredDeidentifyTemplate: "sample-structured-deidentify-template",
 *                 },
 *             },
 *         }],
 *         storageConfig: {
 *             cloudStorageOptions: {
 *                 fileSet: {
 *                     url: "gs://mybucket/directory/",
 *                 },
 *             },
 *         },
 *     },
 * });
 * ```
 * ```python
 * import pulumi
 * import pulumi_gcp as gcp
 * default = gcp.bigquery.Dataset("default",
 *     dataset_id="tf_test",
 *     friendly_name="terraform-test",
 *     description="Description for the dataset created by terraform",
 *     location="US",
 *     default_table_expiration_ms=3600000,
 *     labels={
 *         "env": "default",
 *     })
 * default_table = gcp.bigquery.Table("default",
 *     dataset_id=default.dataset_id,
 *     table_id="tf_test",
 *     deletion_protection=False,
 *     time_partitioning=gcp.bigquery.TableTimePartitioningArgs(
 *         type="DAY",
 *     ),
 *     labels={
 *         "env": "default",
 *     },
 *     schema="""    [
 *     {
 *       "name": "quantity",
 *       "type": "NUMERIC",
 *       "mode": "NULLABLE",
 *       "description": "The quantity"
 *     },
 *     {
 *       "name": "name",
 *       "type": "STRING",
 *       "mode": "NULLABLE",
 *       "description": "Name of the object"
 *     }
 *     ]
 * """)
 * deidentify = gcp.dataloss.PreventionJobTrigger("deidentify",
 *     parent="projects/my-project-name",
 *     description="Description for the job_trigger created by terraform",
 *     display_name="TerraformDisplayName",
 *     triggers=[gcp.dataloss.PreventionJobTriggerTriggerArgs(
 *         schedule=gcp.dataloss.PreventionJobTriggerTriggerScheduleArgs(
 *             recurrence_period_duration="86400s",
 *         ),
 *     )],
 *     inspect_job=gcp.dataloss.PreventionJobTriggerInspectJobArgs(
 *         inspect_template_name="sample-inspect-template",
 *         actions=[gcp.dataloss.PreventionJobTriggerInspectJobActionArgs(
 *             deidentify=gcp.dataloss.PreventionJobTriggerInspectJobActionDeidentifyArgs(
 *                 cloud_storage_output="gs://samplebucket/dir/",
 *                 file_types_to_transforms=[
 *                     "CSV",
 *                     "TSV",
 *                 ],
 *                 transformation_details_storage_config=gcp.dataloss.PreventionJobTriggerInspectJobActionDeidentifyTransformationDetailsStorageConfigArgs(
 *                     table=gcp.dataloss.PreventionJobTriggerInspectJobActionDeidentifyTransformationDetailsStorageConfigTableArgs(
 *                         project_id="my-project-name",
 *                         dataset_id=default.dataset_id,
 *                         table_id=default_table.table_id,
 *                     ),
 *                 ),
 *                 transformation_config=gcp.dataloss.PreventionJobTriggerInspectJobActionDeidentifyTransformationConfigArgs(
 *                     deidentify_template="sample-deidentify-template",
 *                     image_redact_template="sample-image-redact-template",
 *                     structured_deidentify_template="sample-structured-deidentify-template",
 *                 ),
 *             ),
 *         )],
 *         storage_config=gcp.dataloss.PreventionJobTriggerInspectJobStorageConfigArgs(
 *             cloud_storage_options=gcp.dataloss.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs(
 *                 file_set=gcp.dataloss.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs(
 *                     url="gs://mybucket/directory/",
 *                 ),
 *             ),
 *         ),
 *     ))
 * ```
 * ```csharp
 * using System.Collections.Generic;
 * using System.Linq;
 * using Pulumi;
 * using Gcp = Pulumi.Gcp;
 * return await Deployment.RunAsync(() =>
 * {
 *     var @default = new Gcp.BigQuery.Dataset("default", new()
 *     {
 *         DatasetId = "tf_test",
 *         FriendlyName = "terraform-test",
 *         Description = "Description for the dataset created by terraform",
 *         Location = "US",
 *         DefaultTableExpirationMs = 3600000,
 *         Labels =
 *         {
 *             { "env", "default" },
 *         },
 *     });
 *     var defaultTable = new Gcp.BigQuery.Table("default", new()
 *     {
 *         DatasetId = @default.DatasetId,
 *         TableId = "tf_test",
 *         DeletionProtection = false,
 *         TimePartitioning = new Gcp.BigQuery.Inputs.TableTimePartitioningArgs
 *         {
 *             Type = "DAY",
 *         },
 *         Labels =
 *         {
 *             { "env", "default" },
 *         },
 *         Schema = @"    [
 *     {
 *       ""name"": ""quantity"",
 *       ""type"": ""NUMERIC"",
 *       ""mode"": ""NULLABLE"",
 *       ""description"": ""The quantity""
 *     },
 *     {
 *       ""name"": ""name"",
 *       ""type"": ""STRING"",
 *       ""mode"": ""NULLABLE"",
 *       ""description"": ""Name of the object""
 *     }
 *     ]
 * ",
 *     });
 *     var deidentify = new Gcp.DataLoss.PreventionJobTrigger("deidentify", new()
 *     {
 *         Parent = "projects/my-project-name",
 *         Description = "Description for the job_trigger created by terraform",
 *         DisplayName = "TerraformDisplayName",
 *         Triggers = new[]
 *         {
 *             new Gcp.DataLoss.Inputs.PreventionJobTriggerTriggerArgs
 *             {
 *                 Schedule = new Gcp.DataLoss.Inputs.PreventionJobTriggerTriggerScheduleArgs
 *                 {
 *                     RecurrencePeriodDuration = "86400s",
 *                 },
 *             },
 *         },
 *         InspectJob = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobArgs
 *         {
 *             InspectTemplateName = "sample-inspect-template",
 *             Actions = new[]
 *             {
 *                 new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionArgs
 *                 {
 *                     Deidentify = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionDeidentifyArgs
 *                     {
 *                         CloudStorageOutput = "gs://samplebucket/dir/",
 *                         FileTypesToTransforms = new[]
 *                         {
 *                             "CSV",
 *                             "TSV",
 *                         },
 *                         TransformationDetailsStorageConfig = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionDeidentifyTransformationDetailsStorageConfigArgs
 *                         {
 *                             Table = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionDeidentifyTransformationDetailsStorageConfigTableArgs
 *                             {
 *                                 ProjectId = "my-project-name",
 *                                 DatasetId = @default.DatasetId,
 *                                 TableId = defaultTable.TableId,
 *                             },
 *                         },
 *                         TransformationConfig = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionDeidentifyTransformationConfigArgs
 *                         {
 *                             DeidentifyTemplate = "sample-deidentify-template",
 *                             ImageRedactTemplate = "sample-image-redact-template",
 *                             StructuredDeidentifyTemplate = "sample-structured-deidentify-template",
 *                         },
 *                     },
 *                 },
 *             },
 *             StorageConfig = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobStorageConfigArgs
 *             {
 *                 CloudStorageOptions = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs
 *                 {
 *                     FileSet = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs
 *                     {
 *                         Url = "gs://mybucket/directory/",
 *                     },
 *                 },
 *             },
 *         },
 *     });
 * });
 * ```
 * ```go
 * package main
 * import (
 * 	"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/bigquery"
 * 	"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/dataloss"
 * 	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
 * )
 * func main() {
 * 	pulumi.Run(func(ctx *pulumi.Context) error {
 * 		_default, err := bigquery.NewDataset(ctx, "default", &bigquery.DatasetArgs{
 * 			DatasetId:                pulumi.String("tf_test"),
 * 			FriendlyName:             pulumi.String("terraform-test"),
 * 			Description:              pulumi.String("Description for the dataset created by terraform"),
 * 			Location:                 pulumi.String("US"),
 * 			DefaultTableExpirationMs: pulumi.Int(3600000),
 * 			Labels: pulumi.StringMap{
 * 				"env": pulumi.String("default"),
 * 			},
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		defaultTable, err := bigquery.NewTable(ctx, "default", &bigquery.TableArgs{
 * 			DatasetId:          _default.DatasetId,
 * 			TableId:            pulumi.String("tf_test"),
 * 			DeletionProtection: pulumi.Bool(false),
 * 			TimePartitioning: &bigquery.TableTimePartitioningArgs{
 * 				Type: pulumi.String("DAY"),
 * 			},
 * 			Labels: pulumi.StringMap{
 * 				"env": pulumi.String("default"),
 * 			},
 * 			Schema: pulumi.String(`    [
 *     {
 *       "name": "quantity",
 *       "type": "NUMERIC",
 *       "mode": "NULLABLE",
 *       "description": "The quantity"
 *     },
 *     {
 *       "name": "name",
 *       "type": "STRING",
 *       "mode": "NULLABLE",
 *       "description": "Name of the object"
 *     }
 *     ]
 * `),
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		_, err = dataloss.NewPreventionJobTrigger(ctx, "deidentify", &dataloss.PreventionJobTriggerArgs{
 * 			Parent:      pulumi.String("projects/my-project-name"),
 * 			Description: pulumi.String("Description for the job_trigger created by terraform"),
 * 			DisplayName: pulumi.String("TerraformDisplayName"),
 * 			Triggers: dataloss.PreventionJobTriggerTriggerArray{
 * 				&dataloss.PreventionJobTriggerTriggerArgs{
 * 					Schedule: &dataloss.PreventionJobTriggerTriggerScheduleArgs{
 * 						RecurrencePeriodDuration: pulumi.String("86400s"),
 * 					},
 * 				},
 * 			},
 * 			InspectJob: &dataloss.PreventionJobTriggerInspectJobArgs{
 * 				InspectTemplateName: pulumi.String("sample-inspect-template"),
 * 				Actions: dataloss.PreventionJobTriggerInspectJobActionArray{
 * 					&dataloss.PreventionJobTriggerInspectJobActionArgs{
 * 						Deidentify: &dataloss.PreventionJobTriggerInspectJobActionDeidentifyArgs{
 * 							CloudStorageOutput: pulumi.String("gs://samplebucket/dir/"),
 * 							FileTypesToTransforms: pulumi.StringArray{
 * 								pulumi.String("CSV"),
 * 								pulumi.String("TSV"),
 * 							},
 * 							TransformationDetailsStorageConfig: &dataloss.PreventionJobTriggerInspectJobActionDeidentifyTransformationDetailsStorageConfigArgs{
 * 								Table: &dataloss.PreventionJobTriggerInspectJobActionDeidentifyTransformationDetailsStorageConfigTableArgs{
 * 									ProjectId: pulumi.String("my-project-name"),
 * 									DatasetId: _default.DatasetId,
 * 									TableId:   defaultTable.TableId,
 * 								},
 * 							},
 * 							TransformationConfig: &dataloss.PreventionJobTriggerInspectJobActionDeidentifyTransformationConfigArgs{
 * 								DeidentifyTemplate:           pulumi.String("sample-deidentify-template"),
 * 								ImageRedactTemplate:          pulumi.String("sample-image-redact-template"),
 * 								StructuredDeidentifyTemplate: pulumi.String("sample-structured-deidentify-template"),
 * 							},
 * 						},
 * 					},
 * 				},
 * 				StorageConfig: &dataloss.PreventionJobTriggerInspectJobStorageConfigArgs{
 * 					CloudStorageOptions: &dataloss.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs{
 * 						FileSet: &dataloss.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs{
 * 							Url: pulumi.String("gs://mybucket/directory/"),
 * 						},
 * 					},
 * 				},
 * 			},
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		return nil
 * 	})
 * }
 * ```
 * ```java
 * package generated_program;
 * import com.pulumi.Context;
 * import com.pulumi.Pulumi;
 * import com.pulumi.core.Output;
 * import com.pulumi.gcp.bigquery.Dataset;
 * import com.pulumi.gcp.bigquery.DatasetArgs;
 * import com.pulumi.gcp.bigquery.Table;
 * import com.pulumi.gcp.bigquery.TableArgs;
 * import com.pulumi.gcp.bigquery.inputs.TableTimePartitioningArgs;
 * import com.pulumi.gcp.dataloss.PreventionJobTrigger;
 * import com.pulumi.gcp.dataloss.PreventionJobTriggerArgs;
 * import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerTriggerArgs;
 * import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerTriggerScheduleArgs;
 * import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobArgs;
 * import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigArgs;
 * import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs;
 * import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs;
 * import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobActionArgs;
 * import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobActionDeidentifyArgs;
 * import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobActionDeidentifyTransformationDetailsStorageConfigArgs;
 * import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobActionDeidentifyTransformationDetailsStorageConfigTableArgs;
 * import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobActionDeidentifyTransformationConfigArgs;
 * import java.util.List;
 * import java.util.ArrayList;
 * import java.util.Map;
 * import java.io.File;
 * import java.nio.file.Files;
 * import java.nio.file.Paths;
 * public class App {
 *     public static void main(String[] args) {
 *         Pulumi.run(App::stack);
 *     }
 *     public static void stack(Context ctx) {
 *         var default_ = new Dataset("default", DatasetArgs.builder()
 *             .datasetId("tf_test")
 *             .friendlyName("terraform-test")
 *             .description("Description for the dataset created by terraform")
 *             .location("US")
 *             .defaultTableExpirationMs(3600000)
 *             .labels(Map.of("env", "default"))
 *             .build());
 *         var defaultTable = new Table("defaultTable", TableArgs.builder()
 *             .datasetId(default_.datasetId())
 *             .tableId("tf_test")
 *             .deletionProtection(false)
 *             .timePartitioning(TableTimePartitioningArgs.builder()
 *                 .type("DAY")
 *                 .build())
 *             .labels(Map.of("env", "default"))
 *             .schema("""
 *     [
 *     {
 *       "name": "quantity",
 *       "type": "NUMERIC",
 *       "mode": "NULLABLE",
 *       "description": "The quantity"
 *     },
 *     {
 *       "name": "name",
 *       "type": "STRING",
 *       "mode": "NULLABLE",
 *       "description": "Name of the object"
 *     }
 *     ]
 *             """)
 *             .build());
 *         var deidentify = new PreventionJobTrigger("deidentify", PreventionJobTriggerArgs.builder()
 *             .parent("projects/my-project-name")
 *             .description("Description for the job_trigger created by terraform")
 *             .displayName("TerraformDisplayName")
 *             .triggers(PreventionJobTriggerTriggerArgs.builder()
 *                 .schedule(PreventionJobTriggerTriggerScheduleArgs.builder()
 *                     .recurrencePeriodDuration("86400s")
 *                     .build())
 *                 .build())
 *             .inspectJob(PreventionJobTriggerInspectJobArgs.builder()
 *                 .inspectTemplateName("sample-inspect-template")
 *                 .actions(PreventionJobTriggerInspectJobActionArgs.builder()
 *                     .deidentify(PreventionJobTriggerInspectJobActionDeidentifyArgs.builder()
 *                         .cloudStorageOutput("gs://samplebucket/dir/")
 *                         .fileTypesToTransforms(
 *                             "CSV",
 *                             "TSV")
 *                         .transformationDetailsStorageConfig(PreventionJobTriggerInspectJobActionDeidentifyTransformationDetailsStorageConfigArgs.builder()
 *                             .table(PreventionJobTriggerInspectJobActionDeidentifyTransformationDetailsStorageConfigTableArgs.builder()
 *                                 .projectId("my-project-name")
 *                                 .datasetId(default_.datasetId())
 *                                 .tableId(defaultTable.tableId())
 *                                 .build())
 *                             .build())
 *                         .transformationConfig(PreventionJobTriggerInspectJobActionDeidentifyTransformationConfigArgs.builder()
 *                             .deidentifyTemplate("sample-deidentify-template")
 *                             .imageRedactTemplate("sample-image-redact-template")
 *                             .structuredDeidentifyTemplate("sample-structured-deidentify-template")
 *                             .build())
 *                         .build())
 *                     .build())
 *                 .storageConfig(PreventionJobTriggerInspectJobStorageConfigArgs.builder()
 *                     .cloudStorageOptions(PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs.builder()
 *                         .fileSet(PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs.builder()
 *                             .url("gs://mybucket/directory/")
 *                             .build())
 *                         .build())
 *                     .build())
 *                 .build())
 *             .build());
 *     }
 * }
 * ```
 * ```yaml
 * resources:
 *   deidentify:
 *     type: gcp:dataloss:PreventionJobTrigger
 *     properties:
 *       parent: projects/my-project-name
 *       description: Description for the job_trigger created by terraform
 *       displayName: TerraformDisplayName
 *       triggers:
 *         - schedule:
 *             recurrencePeriodDuration: 86400s
 *       inspectJob:
 *         inspectTemplateName: sample-inspect-template
 *         actions:
 *           - deidentify:
 *               cloudStorageOutput: gs://samplebucket/dir/
 *               fileTypesToTransforms:
 *                 - CSV
 *                 - TSV
 *               transformationDetailsStorageConfig:
 *                 table:
 *                   projectId: my-project-name
 *                   datasetId: ${default.datasetId}
 *                   tableId: ${defaultTable.tableId}
 *               transformationConfig:
 *                 deidentifyTemplate: sample-deidentify-template
 *                 imageRedactTemplate: sample-image-redact-template
 *                 structuredDeidentifyTemplate: sample-structured-deidentify-template
 *         storageConfig:
 *           cloudStorageOptions:
 *             fileSet:
 *               url: gs://mybucket/directory/
 *   default:
 *     type: gcp:bigquery:Dataset
 *     properties:
 *       datasetId: tf_test
 *       friendlyName: terraform-test
 *       description: Description for the dataset created by terraform
 *       location: US
 *       defaultTableExpirationMs: 3.6e+06
 *       labels:
 *         env: default
 *   defaultTable:
 *     type: gcp:bigquery:Table
 *     name: default
 *     properties:
 *       datasetId: ${default.datasetId}
 *       tableId: tf_test
 *       deletionProtection: false
 *       timePartitioning:
 *         type: DAY
 *       labels:
 *         env: default
 *       schema: |2
 *             [
 *             {
 *               "name": "quantity",
 *               "type": "NUMERIC",
 *               "mode": "NULLABLE",
 *               "description": "The quantity"
 *             },
 *             {
 *               "name": "name",
 *               "type": "STRING",
 *               "mode": "NULLABLE",
 *               "description": "Name of the object"
 *             }
 *             ]
 * ```
 * 
 * ### Dlp Job Trigger Hybrid
 * 
 * ```typescript
 * import * as pulumi from "@pulumi/pulumi";
 * import * as gcp from "@pulumi/gcp";
 * const hybridTrigger = new gcp.dataloss.PreventionJobTrigger("hybrid_trigger", {
 *     parent: "projects/my-project-name",
 *     triggers: [{
 *         manual: {},
 *     }],
 *     inspectJob: {
 *         inspectTemplateName: "fake",
 *         actions: [{
 *             saveFindings: {
 *                 outputConfig: {
 *                     table: {
 *                         projectId: "project",
 *                         datasetId: "dataset",
 *                     },
 *                 },
 *             },
 *         }],
 *         storageConfig: {
 *             hybridOptions: {
 *                 description: "Hybrid job trigger for data from the comments field of a table that contains customer appointment bookings",
 *                 requiredFindingLabelKeys: ["appointment-bookings-comments"],
 *                 labels: {
 *                     env: "prod",
 *                 },
 *                 tableOptions: {
 *                     identifyingFields: [{
 *                         name: "booking_id",
 *                     }],
 *                 },
 *             },
 *         },
 *     },
 * });
 * ```
 * ```python
 * import pulumi
 * import pulumi_gcp as gcp
 * hybrid_trigger = gcp.dataloss.PreventionJobTrigger("hybrid_trigger",
 *     parent="projects/my-project-name",
 *     triggers=[gcp.dataloss.PreventionJobTriggerTriggerArgs(
 *         manual=gcp.dataloss.PreventionJobTriggerTriggerManualArgs(),
 *     )],
 *     inspect_job=gcp.dataloss.PreventionJobTriggerInspectJobArgs(
 *         inspect_template_name="fake",
 *         actions=[gcp.dataloss.PreventionJobTriggerInspectJobActionArgs(
 *             save_findings=gcp.dataloss.PreventionJobTriggerInspectJobActionSaveFindingsArgs(
 *                 output_config=gcp.dataloss.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs(
 *                     table=gcp.dataloss.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs(
 *                         project_id="project",
 *                         dataset_id="dataset",
 *                     ),
 *                 ),
 *             ),
 *         )],
 *         storage_config=gcp.dataloss.PreventionJobTriggerInspectJobStorageConfigArgs(
 *             hybrid_options=gcp.dataloss.PreventionJobTriggerInspectJobStorageConfigHybridOptionsArgs(
 *                 description="Hybrid job trigger for data from the comments field of a table that contains customer appointment bookings",
 *                 required_finding_label_keys=["appointment-bookings-comments"],
 *                 labels={
 *                     "env": "prod",
 *                 },
 *                 table_options=gcp.dataloss.PreventionJobTriggerInspectJobStorageConfigHybridOptionsTableOptionsArgs(
 *                     identifying_fields=[gcp.dataloss.PreventionJobTriggerInspectJobStorageConfigHybridOptionsTableOptionsIdentifyingFieldArgs(
 *                         name="booking_id",
 *                     )],
 *                 ),
 *             ),
 *         ),
 *     ))
 * ```
 * ```csharp
 * using System.Collections.Generic;
 * using System.Linq;
 * using Pulumi;
 * using Gcp = Pulumi.Gcp;
 * return await Deployment.RunAsync(() =>
 * {
 *     var hybridTrigger = new Gcp.DataLoss.PreventionJobTrigger("hybrid_trigger", new()
 *     {
 *         Parent = "projects/my-project-name",
 *         Triggers = new[]
 *         {
 *             new Gcp.DataLoss.Inputs.PreventionJobTriggerTriggerArgs
 *             {
 *                 Manual = null,
 *             },
 *         },
 *         InspectJob = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobArgs
 *         {
 *             InspectTemplateName = "fake",
 *             Actions = new[]
 *             {
 *                 new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionArgs
 *                 {
 *                     SaveFindings = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionSaveFindingsArgs
 *                     {
 *                         OutputConfig = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs
 *                         {
 *                             Table = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs
 *                             {
 *                                 ProjectId = "project",
 *                                 DatasetId = "dataset",
 *                             },
 *                         },
 *                     },
 *                 },
 *             },
 *             StorageConfig = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobStorageConfigArgs
 *             {
 *                 HybridOptions = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobStorageConfigHybridOptionsArgs
 *                 {
 *                     Description = "Hybrid job trigger for data from the comments field of a table that contains customer appointment bookings",
 *                     RequiredFindingLabelKeys = new[]
 *                     {
 *                         "appointment-bookings-comments",
 *                     },
 *                     Labels =
 *                     {
 *                         { "env", "prod" },
 *                     },
 *                     TableOptions = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobStorageConfigHybridOptionsTableOptionsArgs
 *                     {
 *                         IdentifyingFields = new[]
 *                         {
 *                             new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobStorageConfigHybridOptionsTableOptionsIdentifyingFieldArgs
 *                             {
 *                                 Name = "booking_id",
 *                             },
 *                         },
 *                     },
 *                 },
 *             },
 *         },
 *     });
 * });
 * ```
 * ```go
 * package main
 * import (
 * 	"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/dataloss"
 * 	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
 * )
 * func main() {
 * 	pulumi.Run(func(ctx *pulumi.Context) error {
 * 		_, err := dataloss.NewPreventionJobTrigger(ctx, "hybrid_trigger", &dataloss.PreventionJobTriggerArgs{
 * 			Parent: pulumi.String("projects/my-project-name"),
 * 			Triggers: dataloss.PreventionJobTriggerTriggerArray{
 * 				&dataloss.PreventionJobTriggerTriggerArgs{
 * 					Manual: nil,
 * 				},
 * 			},
 * 			InspectJob: &dataloss.PreventionJobTriggerInspectJobArgs{
 * 				InspectTemplateName: pulumi.String("fake"),
 * 				Actions: dataloss.PreventionJobTriggerInspectJobActionArray{
 * 					&dataloss.PreventionJobTriggerInspectJobActionArgs{
 * 						SaveFindings: &dataloss.PreventionJobTriggerInspectJobActionSaveFindingsArgs{
 * 							OutputConfig: &dataloss.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs{
 * 								Table: &dataloss.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs{
 * 									ProjectId: pulumi.String("project"),
 * 									DatasetId: pulumi.String("dataset"),
 * 								},
 * 							},
 * 						},
 * 					},
 * 				},
 * 				StorageConfig: &dataloss.PreventionJobTriggerInspectJobStorageConfigArgs{
 * 					HybridOptions: &dataloss.PreventionJobTriggerInspectJobStorageConfigHybridOptionsArgs{
 * 						Description: pulumi.String("Hybrid job trigger for data from the comments field of a table that contains customer appointment bookings"),
 * 						RequiredFindingLabelKeys: pulumi.StringArray{
 * 							pulumi.String("appointment-bookings-comments"),
 * 						},
 * 						Labels: pulumi.StringMap{
 * 							"env": pulumi.String("prod"),
 * 						},
 * 						TableOptions: &dataloss.PreventionJobTriggerInspectJobStorageConfigHybridOptionsTableOptionsArgs{
 * 							IdentifyingFields: dataloss.PreventionJobTriggerInspectJobStorageConfigHybridOptionsTableOptionsIdentifyingFieldArray{
 * 								&dataloss.PreventionJobTriggerInspectJobStorageConfigHybridOptionsTableOptionsIdentifyingFieldArgs{
 * 									Name: pulumi.String("booking_id"),
 * 								},
 * 							},
 * 						},
 * 					},
 * 				},
 * 			},
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		return nil
 * 	})
 * }
 * ```
 * ```java
 * package generated_program;
 * import com.pulumi.Context;
 * import com.pulumi.Pulumi;
 * import com.pulumi.core.Output;
 * import com.pulumi.gcp.dataloss.PreventionJobTrigger;
 * import com.pulumi.gcp.dataloss.PreventionJobTriggerArgs;
 * import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerTriggerArgs;
 * import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerTriggerManualArgs;
 * import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobArgs;
 * import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigArgs;
 * import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigHybridOptionsArgs;
 * import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigHybridOptionsTableOptionsArgs;
 * import java.util.List;
 * import java.util.ArrayList;
 * import java.util.Map;
 * import java.io.File;
 * import java.nio.file.Files;
 * import java.nio.file.Paths;
 * public class App {
 *     public static void main(String[] args) {
 *         Pulumi.run(App::stack);
 *     }
 *     public static void stack(Context ctx) {
 *         var hybridTrigger = new PreventionJobTrigger("hybridTrigger", PreventionJobTriggerArgs.builder()
 *             .parent("projects/my-project-name")
 *             .triggers(PreventionJobTriggerTriggerArgs.builder()
 *                 .manual()
 *                 .build())
 *             .inspectJob(PreventionJobTriggerInspectJobArgs.builder()
 *                 .inspectTemplateName("fake")
 *                 .actions(PreventionJobTriggerInspectJobActionArgs.builder()
 *                     .saveFindings(PreventionJobTriggerInspectJobActionSaveFindingsArgs.builder()
 *                         .outputConfig(PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs.builder()
 *                             .table(PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs.builder()
 *                                 .projectId("project")
 *                                 .datasetId("dataset")
 *                                 .build())
 *                             .build())
 *                         .build())
 *                     .build())
 *                 .storageConfig(PreventionJobTriggerInspectJobStorageConfigArgs.builder()
 *                     .hybridOptions(PreventionJobTriggerInspectJobStorageConfigHybridOptionsArgs.builder()
 *                         .description("Hybrid job trigger for data from the comments field of a table that contains customer appointment bookings")
 *                         .requiredFindingLabelKeys("appointment-bookings-comments")
 *                         .labels(Map.of("env", "prod"))
 *                         .tableOptions(PreventionJobTriggerInspectJobStorageConfigHybridOptionsTableOptionsArgs.builder()
 *                             .identifyingFields(PreventionJobTriggerInspectJobStorageConfigHybridOptionsTableOptionsIdentifyingFieldArgs.builder()
 *                                 .name("booking_id")
 *                                 .build())
 *                             .build())
 *                         .build())
 *                     .build())
 *                 .build())
 *             .build());
 *     }
 * }
 * ```
 * ```yaml
 * resources:
 *   hybridTrigger:
 *     type: gcp:dataloss:PreventionJobTrigger
 *     name: hybrid_trigger
 *     properties:
 *       parent: projects/my-project-name
 *       triggers:
 *         - manual: {}
 *       inspectJob:
 *         inspectTemplateName: fake
 *         actions:
 *           - saveFindings:
 *               outputConfig:
 *                 table:
 *                   projectId: project
 *                   datasetId: dataset
 *         storageConfig:
 *           hybridOptions:
 *             description: Hybrid job trigger for data from the comments field of a table that contains customer appointment bookings
 *             requiredFindingLabelKeys:
 *               - appointment-bookings-comments
 *             labels:
 *               env: prod
 *             tableOptions:
 *               identifyingFields:
 *                 - name: booking_id
 * ```
 * 
 * ### Dlp Job Trigger Inspect
 * 
 * ```typescript
 * import * as pulumi from "@pulumi/pulumi";
 * import * as gcp from "@pulumi/gcp";
 * const inspect = new gcp.dataloss.PreventionJobTrigger("inspect", {
 *     parent: "projects/my-project-name",
 *     description: "Description",
 *     displayName: "Displayname",
 *     triggers: [{
 *         schedule: {
 *             recurrencePeriodDuration: "86400s",
 *         },
 *     }],
 *     inspectJob: {
 *         inspectTemplateName: "fake",
 *         actions: [{
 *             saveFindings: {
 *                 outputConfig: {
 *                     table: {
 *                         projectId: "project",
 *                         datasetId: "dataset",
 *                     },
 *                 },
 *             },
 *         }],
 *         storageConfig: {
 *             cloudStorageOptions: {
 *                 fileSet: {
 *                     url: "gs://mybucket/directory/",
 *                 },
 *             },
 *         },
 *         inspectConfig: {
 *             customInfoTypes: [{
 *                 infoType: {
 *                     name: "MY_CUSTOM_TYPE",
 *                 },
 *                 likelihood: "UNLIKELY",
 *                 regex: {
 *                     pattern: "test*",
 *                 },
 *             }],
 *             infoTypes: [{
 *                 name: "EMAIL_ADDRESS",
 *             }],
 *             minLikelihood: "UNLIKELY",
 *             ruleSets: [
 *                 {
 *                     infoTypes: [{
 *                         name: "EMAIL_ADDRESS",
 *                     }],
 *                     rules: [{
 *                         exclusionRule: {
 *                             regex: {
 *                                 pattern: ".+@example.com",
 *                             },
 *                             matchingType: "MATCHING_TYPE_FULL_MATCH",
 *                         },
 *                     }],
 *                 },
 *                 {
 *                     infoTypes: [{
 *                         name: "MY_CUSTOM_TYPE",
 *                     }],
 *                     rules: [{
 *                         hotwordRule: {
 *                             hotwordRegex: {
 *                                 pattern: "example*",
 *                             },
 *                             proximity: {
 *                                 windowBefore: 50,
 *                             },
 *                             likelihoodAdjustment: {
 *                                 fixedLikelihood: "VERY_LIKELY",
 *                             },
 *                         },
 *                     }],
 *                 },
 *             ],
 *             limits: {
 *                 maxFindingsPerItem: 10,
 *                 maxFindingsPerRequest: 50,
 *             },
 *         },
 *     },
 * });
 * ```
 * ```python
 * import pulumi
 * import pulumi_gcp as gcp
 * inspect = gcp.dataloss.PreventionJobTrigger("inspect",
 *     parent="projects/my-project-name",
 *     description="Description",
 *     display_name="Displayname",
 *     triggers=[gcp.dataloss.PreventionJobTriggerTriggerArgs(
 *         schedule=gcp.dataloss.PreventionJobTriggerTriggerScheduleArgs(
 *             recurrence_period_duration="86400s",
 *         ),
 *     )],
 *     inspect_job=gcp.dataloss.PreventionJobTriggerInspectJobArgs(
 *         inspect_template_name="fake",
 *         actions=[gcp.dataloss.PreventionJobTriggerInspectJobActionArgs(
 *             save_findings=gcp.dataloss.PreventionJobTriggerInspectJobActionSaveFindingsArgs(
 *                 output_config=gcp.dataloss.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs(
 *                     table=gcp.dataloss.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs(
 *                         project_id="project",
 *                         dataset_id="dataset",
 *                     ),
 *                 ),
 *             ),
 *         )],
 *         storage_config=gcp.dataloss.PreventionJobTriggerInspectJobStorageConfigArgs(
 *             cloud_storage_options=gcp.dataloss.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs(
 *                 file_set=gcp.dataloss.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs(
 *                     url="gs://mybucket/directory/",
 *                 ),
 *             ),
 *         ),
 *         inspect_config=gcp.dataloss.PreventionJobTriggerInspectJobInspectConfigArgs(
 *             custom_info_types=[gcp.dataloss.PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeArgs(
 *                 info_type=gcp.dataloss.PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeInfoTypeArgs(
 *                     name="MY_CUSTOM_TYPE",
 *                 ),
 *                 likelihood="UNLIKELY",
 *                 regex=gcp.dataloss.PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeRegexArgs(
 *                     pattern="test*",
 *                 ),
 *             )],
 *             info_types=[gcp.dataloss.PreventionJobTriggerInspectJobInspectConfigInfoTypeArgs(
 *                 name="EMAIL_ADDRESS",
 *             )],
 *             min_likelihood="UNLIKELY",
 *             rule_sets=[
 *                 gcp.dataloss.PreventionJobTriggerInspectJobInspectConfigRuleSetArgs(
 *                     info_types=[gcp.dataloss.PreventionJobTriggerInspectJobInspectConfigRuleSetInfoTypeArgs(
 *                         name="EMAIL_ADDRESS",
 *                     )],
 *                     rules=[gcp.dataloss.PreventionJobTriggerInspectJobInspectConfigRuleSetRuleArgs(
 *                         exclusion_rule=gcp.dataloss.PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleArgs(
 *                             regex=gcp.dataloss.PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleRegexArgs(
 *                                 pattern=".+@example.com",
 *                             ),
 *                             matching_type="MATCHING_TYPE_FULL_MATCH",
 *                         ),
 *                     )],
 *                 ),
 *                 gcp.dataloss.PreventionJobTriggerInspectJobInspectConfigRuleSetArgs(
 *                     info_types=[gcp.dataloss.PreventionJobTriggerInspectJobInspectConfigRuleSetInfoTypeArgs(
 *                         name="MY_CUSTOM_TYPE",
 *                     )],
 *                     rules=[gcp.dataloss.PreventionJobTriggerInspectJobInspectConfigRuleSetRuleArgs(
 *                         hotword_rule=gcp.dataloss.PreventionJobTriggerInspectJobInspectConfigRuleSetRuleHotwordRuleArgs(
 *                             hotword_regex=gcp.dataloss.PreventionJobTriggerInspectJobInspectConfigRuleSetRuleHotwordRuleHotwordRegexArgs(
 *                                 pattern="example*",
 *                             ),
 *                             proximity=gcp.dataloss.PreventionJobTriggerInspectJobInspectConfigRuleSetRuleHotwordRuleProximityArgs(
 *                                 window_before=50,
 *                             ),
 *                             likelihood_adjustment=gcp.dataloss.PreventionJobTriggerInspectJobInspectConfigRuleSetRuleHotwordRuleLikelihoodAdjustmentArgs(
 *                                 fixed_likelihood="VERY_LIKELY",
 *                             ),
 *                         ),
 *                     )],
 *                 ),
 *             ],
 *             limits=gcp.dataloss.PreventionJobTriggerInspectJobInspectConfigLimitsArgs(
 *                 max_findings_per_item=10,
 *                 max_findings_per_request=50,
 *             ),
 *         ),
 *     ))
 * ```
 * ```csharp
 * using System.Collections.Generic;
 * using System.Linq;
 * using Pulumi;
 * using Gcp = Pulumi.Gcp;
 * return await Deployment.RunAsync(() =>
 * {
 *     var inspect = new Gcp.DataLoss.PreventionJobTrigger("inspect", new()
 *     {
 *         Parent = "projects/my-project-name",
 *         Description = "Description",
 *         DisplayName = "Displayname",
 *         Triggers = new[]
 *         {
 *             new Gcp.DataLoss.Inputs.PreventionJobTriggerTriggerArgs
 *             {
 *                 Schedule = new Gcp.DataLoss.Inputs.PreventionJobTriggerTriggerScheduleArgs
 *                 {
 *                     RecurrencePeriodDuration = "86400s",
 *                 },
 *             },
 *         },
 *         InspectJob = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobArgs
 *         {
 *             InspectTemplateName = "fake",
 *             Actions = new[]
 *             {
 *                 new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionArgs
 *                 {
 *                     SaveFindings = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionSaveFindingsArgs
 *                     {
 *                         OutputConfig = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs
 *                         {
 *                             Table = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs
 *                             {
 *                                 ProjectId = "project",
 *                                 DatasetId = "dataset",
 *                             },
 *                         },
 *                     },
 *                 },
 *             },
 *             StorageConfig = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobStorageConfigArgs
 *             {
 *                 CloudStorageOptions = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs
 *                 {
 *                     FileSet = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs
 *                     {
 *                         Url = "gs://mybucket/directory/",
 *                     },
 *                 },
 *             },
 *             InspectConfig = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobInspectConfigArgs
 *             {
 *                 CustomInfoTypes = new[]
 *                 {
 *                     new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeArgs
 *                     {
 *                         InfoType = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeInfoTypeArgs
 *                         {
 *                             Name = "MY_CUSTOM_TYPE",
 *                         },
 *                         Likelihood = "UNLIKELY",
 *                         Regex = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeRegexArgs
 *                         {
 *                             Pattern = "test*",
 *                         },
 *                     },
 *                 },
 *                 InfoTypes = new[]
 *                 {
 *                     new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobInspectConfigInfoTypeArgs
 *                     {
 *                         Name = "EMAIL_ADDRESS",
 *                     },
 *                 },
 *                 MinLikelihood = "UNLIKELY",
 *                 RuleSets = new[]
 *                 {
 *                     new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobInspectConfigRuleSetArgs
 *                     {
 *                         InfoTypes = new[]
 *                         {
 *                             new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobInspectConfigRuleSetInfoTypeArgs
 *                             {
 *                                 Name = "EMAIL_ADDRESS",
 *                             },
 *                         },
 *                         Rules = new[]
 *                         {
 *                             new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobInspectConfigRuleSetRuleArgs
 *                             {
 *                                 ExclusionRule = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleArgs
 *                                 {
 *                                     Regex = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleRegexArgs
 *                                     {
 *                                         Pattern = ".+@example.com",
 *                                     },
 *                                     MatchingType = "MATCHING_TYPE_FULL_MATCH",
 *                                 },
 *                             },
 *                         },
 *                     },
 *                     new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobInspectConfigRuleSetArgs
 *                     {
 *                         InfoTypes = new[]
 *                         {
 *                             new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobInspectConfigRuleSetInfoTypeArgs
 *                             {
 *                                 Name = "MY_CUSTOM_TYPE",
 *                             },
 *                         },
 *                         Rules = new[]
 *                         {
 *                             new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobInspectConfigRuleSetRuleArgs
 *                             {
 *                                 HotwordRule = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobInspectConfigRuleSetRuleHotwordRuleArgs
 *                                 {
 *                                     HotwordRegex = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobInspectConfigRuleSetRuleHotwordRuleHotwordRegexArgs
 *                                     {
 *                                         Pattern = "example*",
 *                                     },
 *                                     Proximity = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobInspectConfigRuleSetRuleHotwordRuleProximityArgs
 *                                     {
 *                                         WindowBefore = 50,
 *                                     },
 *                                     LikelihoodAdjustment = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobInspectConfigRuleSetRuleHotwordRuleLikelihoodAdjustmentArgs
 *                                     {
 *                                         FixedLikelihood = "VERY_LIKELY",
 *                                     },
 *                                 },
 *                             },
 *                         },
 *                     },
 *                 },
 *                 Limits = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobInspectConfigLimitsArgs
 *                 {
 *                     MaxFindingsPerItem = 10,
 *                     MaxFindingsPerRequest = 50,
 *                 },
 *             },
 *         },
 *     });
 * });
 * ```
 * ```go
 * package main
 * import (
 * 	"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/dataloss"
 * 	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
 * )
 * func main() {
 * 	pulumi.Run(func(ctx *pulumi.Context) error {
 * 		_, err := dataloss.NewPreventionJobTrigger(ctx, "inspect", &dataloss.PreventionJobTriggerArgs{
 * 			Parent:      pulumi.String("projects/my-project-name"),
 * 			Description: pulumi.String("Description"),
 * 			DisplayName: pulumi.String("Displayname"),
 * 			Triggers: dataloss.PreventionJobTriggerTriggerArray{
 * 				&dataloss.PreventionJobTriggerTriggerArgs{
 * 					Schedule: &dataloss.PreventionJobTriggerTriggerScheduleArgs{
 * 						RecurrencePeriodDuration: pulumi.String("86400s"),
 * 					},
 * 				},
 * 			},
 * 			InspectJob: &dataloss.PreventionJobTriggerInspectJobArgs{
 * 				InspectTemplateName: pulumi.String("fake"),
 * 				Actions: dataloss.PreventionJobTriggerInspectJobActionArray{
 * 					&dataloss.PreventionJobTriggerInspectJobActionArgs{
 * 						SaveFindings: &dataloss.PreventionJobTriggerInspectJobActionSaveFindingsArgs{
 * 							OutputConfig: &dataloss.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs{
 * 								Table: &dataloss.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs{
 * 									ProjectId: pulumi.String("project"),
 * 									DatasetId: pulumi.String("dataset"),
 * 								},
 * 							},
 * 						},
 * 					},
 * 				},
 * 				StorageConfig: &dataloss.PreventionJobTriggerInspectJobStorageConfigArgs{
 * 					CloudStorageOptions: &dataloss.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs{
 * 						FileSet: &dataloss.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs{
 * 							Url: pulumi.String("gs://mybucket/directory/"),
 * 						},
 * 					},
 * 				},
 * 				InspectConfig: &dataloss.PreventionJobTriggerInspectJobInspectConfigArgs{
 * 					CustomInfoTypes: dataloss.PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeArray{
 * 						&dataloss.PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeArgs{
 * 							InfoType: &dataloss.PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeInfoTypeArgs{
 * 								Name: pulumi.String("MY_CUSTOM_TYPE"),
 * 							},
 * 							Likelihood: pulumi.String("UNLIKELY"),
 * 							Regex: &dataloss.PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeRegexArgs{
 * 								Pattern: pulumi.String("test*"),
 * 							},
 * 						},
 * 					},
 * 					InfoTypes: dataloss.PreventionJobTriggerInspectJobInspectConfigInfoTypeArray{
 * 						&dataloss.PreventionJobTriggerInspectJobInspectConfigInfoTypeArgs{
 * 							Name: pulumi.String("EMAIL_ADDRESS"),
 * 						},
 * 					},
 * 					MinLikelihood: pulumi.String("UNLIKELY"),
 * 					RuleSets: dataloss.PreventionJobTriggerInspectJobInspectConfigRuleSetArray{
 * 						&dataloss.PreventionJobTriggerInspectJobInspectConfigRuleSetArgs{
 * 							InfoTypes: dataloss.PreventionJobTriggerInspectJobInspectConfigRuleSetInfoTypeArray{
 * 								&dataloss.PreventionJobTriggerInspectJobInspectConfigRuleSetInfoTypeArgs{
 * 									Name: pulumi.String("EMAIL_ADDRESS"),
 * 								},
 * 							},
 * 							Rules: dataloss.PreventionJobTriggerInspectJobInspectConfigRuleSetRuleArray{
 * 								&dataloss.PreventionJobTriggerInspectJobInspectConfigRuleSetRuleArgs{
 * 									ExclusionRule: &dataloss.PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleArgs{
 * 										Regex: &dataloss.PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleRegexArgs{
 * 											Pattern: pulumi.String(".+@example.com"),
 * 										},
 * 										MatchingType: pulumi.String("MATCHING_TYPE_FULL_MATCH"),
 * 									},
 * 								},
 * 							},
 * 						},
 * 						&dataloss.PreventionJobTriggerInspectJobInspectConfigRuleSetArgs{
 * 							InfoTypes: dataloss.PreventionJobTriggerInspectJobInspectConfigRuleSetInfoTypeArray{
 * 								&dataloss.PreventionJobTriggerInspectJobInspectConfigRuleSetInfoTypeArgs{
 * 									Name: pulumi.String("MY_CUSTOM_TYPE"),
 * 								},
 * 							},
 * 							Rules: dataloss.PreventionJobTriggerInspectJobInspectConfigRuleSetRuleArray{
 * 								&dataloss.PreventionJobTriggerInspectJobInspectConfigRuleSetRuleArgs{
 * 									HotwordRule: &dataloss.PreventionJobTriggerInspectJobInspectConfigRuleSetRuleHotwordRuleArgs{
 * 										HotwordRegex: &dataloss.PreventionJobTriggerInspectJobInspectConfigRuleSetRuleHotwordRuleHotwordRegexArgs{
 * 											Pattern: pulumi.String("example*"),
 * 										},
 * 										Proximity: &dataloss.PreventionJobTriggerInspectJobInspectConfigRuleSetRuleHotwordRuleProximityArgs{
 * 											WindowBefore: pulumi.Int(50),
 * 										},
 * 										LikelihoodAdjustment: &dataloss.PreventionJobTriggerInspectJobInspectConfigRuleSetRuleHotwordRuleLikelihoodAdjustmentArgs{
 * 											FixedLikelihood: pulumi.String("VERY_LIKELY"),
 * 										},
 * 									},
 * 								},
 * 							},
 * 						},
 * 					},
 * 					Limits: &dataloss.PreventionJobTriggerInspectJobInspectConfigLimitsArgs{
 * 						MaxFindingsPerItem:    pulumi.Int(10),
 * 						MaxFindingsPerRequest: pulumi.Int(50),
 * 					},
 * 				},
 * 			},
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		return nil
 * 	})
 * }
 * ```
 * ```java
 * package generated_program;
 * import com.pulumi.Context;
 * import com.pulumi.Pulumi;
 * import com.pulumi.core.Output;
 * import com.pulumi.gcp.dataloss.PreventionJobTrigger;
 * import com.pulumi.gcp.dataloss.PreventionJobTriggerArgs;
 * import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerTriggerArgs;
 * import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerTriggerScheduleArgs;
 * import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobArgs;
 * import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigArgs;
 * import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs;
 * import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs;
 * import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobInspectConfigArgs;
 * import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobInspectConfigLimitsArgs;
 * import java.util.List;
 * import java.util.ArrayList;
 * import java.util.Map;
 * import java.io.File;
 * import java.nio.file.Files;
 * import java.nio.file.Paths;
 * public class App {
 *     public static void main(String[] args) {
 *         Pulumi.run(App::stack);
 *     }
 *     public static void stack(Context ctx) {
 *         var inspect = new PreventionJobTrigger("inspect", PreventionJobTriggerArgs.builder()
 *             .parent("projects/my-project-name")
 *             .description("Description")
 *             .displayName("Displayname")
 *             .triggers(PreventionJobTriggerTriggerArgs.builder()
 *                 .schedule(PreventionJobTriggerTriggerScheduleArgs.builder()
 *                     .recurrencePeriodDuration("86400s")
 *                     .build())
 *                 .build())
 *             .inspectJob(PreventionJobTriggerInspectJobArgs.builder()
 *                 .inspectTemplateName("fake")
 *                 .actions(PreventionJobTriggerInspectJobActionArgs.builder()
 *                     .saveFindings(PreventionJobTriggerInspectJobActionSaveFindingsArgs.builder()
 *                         .outputConfig(PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs.builder()
 *                             .table(PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs.builder()
 *                                 .projectId("project")
 *                                 .datasetId("dataset")
 *                                 .build())
 *                             .build())
 *                         .build())
 *                     .build())
 *                 .storageConfig(PreventionJobTriggerInspectJobStorageConfigArgs.builder()
 *                     .cloudStorageOptions(PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs.builder()
 *                         .fileSet(PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs.builder()
 *                             .url("gs://mybucket/directory/")
 *                             .build())
 *                         .build())
 *                     .build())
 *                 .inspectConfig(PreventionJobTriggerInspectJobInspectConfigArgs.builder()
 *                     .customInfoTypes(PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeArgs.builder()
 *                         .infoType(PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeInfoTypeArgs.builder()
 *                             .name("MY_CUSTOM_TYPE")
 *                             .build())
 *                         .likelihood("UNLIKELY")
 *                         .regex(PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeRegexArgs.builder()
 *                             .pattern("test*")
 *                             .build())
 *                         .build())
 *                     .infoTypes(PreventionJobTriggerInspectJobInspectConfigInfoTypeArgs.builder()
 *                         .name("EMAIL_ADDRESS")
 *                         .build())
 *                     .minLikelihood("UNLIKELY")
 *                     .ruleSets(
 *                         PreventionJobTriggerInspectJobInspectConfigRuleSetArgs.builder()
 *                             .infoTypes(PreventionJobTriggerInspectJobInspectConfigRuleSetInfoTypeArgs.builder()
 *                                 .name("EMAIL_ADDRESS")
 *                                 .build())
 *                             .rules(PreventionJobTriggerInspectJobInspectConfigRuleSetRuleArgs.builder()
 *                                 .exclusionRule(PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleArgs.builder()
 *                                     .regex(PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleRegexArgs.builder()
 *                                         .pattern(".+@example.com")
 *                                         .build())
 *                                     .matchingType("MATCHING_TYPE_FULL_MATCH")
 *                                     .build())
 *                                 .build())
 *                             .build(),
 *                         PreventionJobTriggerInspectJobInspectConfigRuleSetArgs.builder()
 *                             .infoTypes(PreventionJobTriggerInspectJobInspectConfigRuleSetInfoTypeArgs.builder()
 *                                 .name("MY_CUSTOM_TYPE")
 *                                 .build())
 *                             .rules(PreventionJobTriggerInspectJobInspectConfigRuleSetRuleArgs.builder()
 *                                 .hotwordRule(PreventionJobTriggerInspectJobInspectConfigRuleSetRuleHotwordRuleArgs.builder()
 *                                     .hotwordRegex(PreventionJobTriggerInspectJobInspectConfigRuleSetRuleHotwordRuleHotwordRegexArgs.builder()
 *                                         .pattern("example*")
 *                                         .build())
 *                                     .proximity(PreventionJobTriggerInspectJobInspectConfigRuleSetRuleHotwordRuleProximityArgs.builder()
 *                                         .windowBefore(50)
 *                                         .build())
 *                                     .likelihoodAdjustment(PreventionJobTriggerInspectJobInspectConfigRuleSetRuleHotwordRuleLikelihoodAdjustmentArgs.builder()
 *                                         .fixedLikelihood("VERY_LIKELY")
 *                                         .build())
 *                                     .build())
 *                                 .build())
 *                             .build())
 *                     .limits(PreventionJobTriggerInspectJobInspectConfigLimitsArgs.builder()
 *                         .maxFindingsPerItem(10)
 *                         .maxFindingsPerRequest(50)
 *                         .build())
 *                     .build())
 *                 .build())
 *             .build());
 *     }
 * }
 * ```
 * ```yaml
 * resources:
 *   inspect:
 *     type: gcp:dataloss:PreventionJobTrigger
 *     properties:
 *       parent: projects/my-project-name
 *       description: Description
 *       displayName: Displayname
 *       triggers:
 *         - schedule:
 *             recurrencePeriodDuration: 86400s
 *       inspectJob:
 *         inspectTemplateName: fake
 *         actions:
 *           - saveFindings:
 *               outputConfig:
 *                 table:
 *                   projectId: project
 *                   datasetId: dataset
 *         storageConfig:
 *           cloudStorageOptions:
 *             fileSet:
 *               url: gs://mybucket/directory/
 *         inspectConfig:
 *           customInfoTypes:
 *             - infoType:
 *                 name: MY_CUSTOM_TYPE
 *               likelihood: UNLIKELY
 *               regex:
 *                 pattern: test*
 *           infoTypes:
 *             - name: EMAIL_ADDRESS
 *           minLikelihood: UNLIKELY
 *           ruleSets:
 *             - infoTypes:
 *                 - name: EMAIL_ADDRESS
 *               rules:
 *                 - exclusionRule:
 *                     regex:
 *                       pattern: .+@example.com
 *                     matchingType: MATCHING_TYPE_FULL_MATCH
 *             - infoTypes:
 *                 - name: MY_CUSTOM_TYPE
 *               rules:
 *                 - hotwordRule:
 *                     hotwordRegex:
 *                       pattern: example*
 *                     proximity:
 *                       windowBefore: 50
 *                     likelihoodAdjustment:
 *                       fixedLikelihood: VERY_LIKELY
 *           limits:
 *             maxFindingsPerItem: 10
 *             maxFindingsPerRequest: 50
 * ```
 * 
 * ### Dlp Job Trigger Publish To Stackdriver
 * 
 * ```typescript
 * import * as pulumi from "@pulumi/pulumi";
 * import * as gcp from "@pulumi/gcp";
 * const publishToStackdriver = new gcp.dataloss.PreventionJobTrigger("publish_to_stackdriver", {
 *     parent: "projects/my-project-name",
 *     description: "Description for the job_trigger created by terraform",
 *     displayName: "TerraformDisplayName",
 *     triggers: [{
 *         schedule: {
 *             recurrencePeriodDuration: "86400s",
 *         },
 *     }],
 *     inspectJob: {
 *         inspectTemplateName: "sample-inspect-template",
 *         actions: [{
 *             publishToStackdriver: {},
 *         }],
 *         storageConfig: {
 *             cloudStorageOptions: {
 *                 fileSet: {
 *                     url: "gs://mybucket/directory/",
 *                 },
 *             },
 *         },
 *     },
 * });
 * ```
 * ```python
 * import pulumi
 * import pulumi_gcp as gcp
 * publish_to_stackdriver = gcp.dataloss.PreventionJobTrigger("publish_to_stackdriver",
 *     parent="projects/my-project-name",
 *     description="Description for the job_trigger created by terraform",
 *     display_name="TerraformDisplayName",
 *     triggers=[gcp.dataloss.PreventionJobTriggerTriggerArgs(
 *         schedule=gcp.dataloss.PreventionJobTriggerTriggerScheduleArgs(
 *             recurrence_period_duration="86400s",
 *         ),
 *     )],
 *     inspect_job=gcp.dataloss.PreventionJobTriggerInspectJobArgs(
 *         inspect_template_name="sample-inspect-template",
 *         actions=[gcp.dataloss.PreventionJobTriggerInspectJobActionArgs(
 *             publish_to_stackdriver=gcp.dataloss.PreventionJobTriggerInspectJobActionPublishToStackdriverArgs(),
 *         )],
 *         storage_config=gcp.dataloss.PreventionJobTriggerInspectJobStorageConfigArgs(
 *             cloud_storage_options=gcp.dataloss.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs(
 *                 file_set=gcp.dataloss.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs(
 *                     url="gs://mybucket/directory/",
 *                 ),
 *             ),
 *         ),
 *     ))
 * ```
 * ```csharp
 * using System.Collections.Generic;
 * using System.Linq;
 * using Pulumi;
 * using Gcp = Pulumi.Gcp;
 * return await Deployment.RunAsync(() =>
 * {
 *     var publishToStackdriver = new Gcp.DataLoss.PreventionJobTrigger("publish_to_stackdriver", new()
 *     {
 *         Parent = "projects/my-project-name",
 *         Description = "Description for the job_trigger created by terraform",
 *         DisplayName = "TerraformDisplayName",
 *         Triggers = new[]
 *         {
 *             new Gcp.DataLoss.Inputs.PreventionJobTriggerTriggerArgs
 *             {
 *                 Schedule = new Gcp.DataLoss.Inputs.PreventionJobTriggerTriggerScheduleArgs
 *                 {
 *                     RecurrencePeriodDuration = "86400s",
 *                 },
 *             },
 *         },
 *         InspectJob = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobArgs
 *         {
 *             InspectTemplateName = "sample-inspect-template",
 *             Actions = new[]
 *             {
 *                 new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionArgs
 *                 {
 *                     PublishToStackdriver = null,
 *                 },
 *             },
 *             StorageConfig = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobStorageConfigArgs
 *             {
 *                 CloudStorageOptions = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs
 *                 {
 *                     FileSet = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs
 *                     {
 *                         Url = "gs://mybucket/directory/",
 *                     },
 *                 },
 *             },
 *         },
 *     });
 * });
 * ```
 * ```go
 * package main
 * import (
 * 	"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/dataloss"
 * 	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
 * )
 * func main() {
 * 	pulumi.Run(func(ctx *pulumi.Context) error {
 * 		_, err := dataloss.NewPreventionJobTrigger(ctx, "publish_to_stackdriver", &dataloss.PreventionJobTriggerArgs{
 * 			Parent:      pulumi.String("projects/my-project-name"),
 * 			Description: pulumi.String("Description for the job_trigger created by terraform"),
 * 			DisplayName: pulumi.String("TerraformDisplayName"),
 * 			Triggers: dataloss.PreventionJobTriggerTriggerArray{
 * 				&dataloss.PreventionJobTriggerTriggerArgs{
 * 					Schedule: &dataloss.PreventionJobTriggerTriggerScheduleArgs{
 * 						RecurrencePeriodDuration: pulumi.String("86400s"),
 * 					},
 * 				},
 * 			},
 * 			InspectJob: &dataloss.PreventionJobTriggerInspectJobArgs{
 * 				InspectTemplateName: pulumi.String("sample-inspect-template"),
 * 				Actions: dataloss.PreventionJobTriggerInspectJobActionArray{
 * 					&dataloss.PreventionJobTriggerInspectJobActionArgs{
 * 						PublishToStackdriver: nil,
 * 					},
 * 				},
 * 				StorageConfig: &dataloss.PreventionJobTriggerInspectJobStorageConfigArgs{
 * 					CloudStorageOptions: &dataloss.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs{
 * 						FileSet: &dataloss.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs{
 * 							Url: pulumi.String("gs://mybucket/directory/"),
 * 						},
 * 					},
 * 				},
 * 			},
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		return nil
 * 	})
 * }
 * ```
 * ```java
 * package generated_program;
 * import com.pulumi.Context;
 * import com.pulumi.Pulumi;
 * import com.pulumi.core.Output;
 * import com.pulumi.gcp.dataloss.PreventionJobTrigger;
 * import com.pulumi.gcp.dataloss.PreventionJobTriggerArgs;
 * import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerTriggerArgs;
 * import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerTriggerScheduleArgs;
 * import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobArgs;
 * import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigArgs;
 * import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs;
 * import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs;
 * import java.util.List;
 * import java.util.ArrayList;
 * import java.util.Map;
 * import java.io.File;
 * import java.nio.file.Files;
 * import java.nio.file.Paths;
 * public class App {
 *     public static void main(String[] args) {
 *         Pulumi.run(App::stack);
 *     }
 *     public static void stack(Context ctx) {
 *         var publishToStackdriver = new PreventionJobTrigger("publishToStackdriver", PreventionJobTriggerArgs.builder()
 *             .parent("projects/my-project-name")
 *             .description("Description for the job_trigger created by terraform")
 *             .displayName("TerraformDisplayName")
 *             .triggers(PreventionJobTriggerTriggerArgs.builder()
 *                 .schedule(PreventionJobTriggerTriggerScheduleArgs.builder()
 *                     .recurrencePeriodDuration("86400s")
 *                     .build())
 *                 .build())
 *             .inspectJob(PreventionJobTriggerInspectJobArgs.builder()
 *                 .inspectTemplateName("sample-inspect-template")
 *                 .actions(PreventionJobTriggerInspectJobActionArgs.builder()
 *                     .publishToStackdriver()
 *                     .build())
 *                 .storageConfig(PreventionJobTriggerInspectJobStorageConfigArgs.builder()
 *                     .cloudStorageOptions(PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs.builder()
 *                         .fileSet(PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs.builder()
 *                             .url("gs://mybucket/directory/")
 *                             .build())
 *                         .build())
 *                     .build())
 *                 .build())
 *             .build());
 *     }
 * }
 * ```
 * ```yaml
 * resources:
 *   publishToStackdriver:
 *     type: gcp:dataloss:PreventionJobTrigger
 *     name: publish_to_stackdriver
 *     properties:
 *       parent: projects/my-project-name
 *       description: Description for the job_trigger created by terraform
 *       displayName: TerraformDisplayName
 *       triggers:
 *         - schedule:
 *             recurrencePeriodDuration: 86400s
 *       inspectJob:
 *         inspectTemplateName: sample-inspect-template
 *         actions:
 *           - publishToStackdriver: {}
 *         storageConfig:
 *           cloudStorageOptions:
 *             fileSet:
 *               url: gs://mybucket/directory/
 * ```
 * 
 * ### Dlp Job Trigger With Id
 * 
 * ```typescript
 * import * as pulumi from "@pulumi/pulumi";
 * import * as gcp from "@pulumi/gcp";
 * const withTriggerId = new gcp.dataloss.PreventionJobTrigger("with_trigger_id", {
 *     parent: "projects/my-project-name",
 *     description: "Starting description",
 *     displayName: "display",
 *     triggerId: "id-",
 *     triggers: [{
 *         schedule: {
 *             recurrencePeriodDuration: "86400s",
 *         },
 *     }],
 *     inspectJob: {
 *         inspectTemplateName: "fake",
 *         actions: [{
 *             saveFindings: {
 *                 outputConfig: {
 *                     table: {
 *                         projectId: "project",
 *                         datasetId: "dataset123",
 *                     },
 *                 },
 *             },
 *         }],
 *         storageConfig: {
 *             cloudStorageOptions: {
 *                 fileSet: {
 *                     url: "gs://mybucket/directory/",
 *                 },
 *             },
 *         },
 *     },
 * });
 * ```
 * ```python
 * import pulumi
 * import pulumi_gcp as gcp
 * with_trigger_id = gcp.dataloss.PreventionJobTrigger("with_trigger_id",
 *     parent="projects/my-project-name",
 *     description="Starting description",
 *     display_name="display",
 *     trigger_id="id-",
 *     triggers=[gcp.dataloss.PreventionJobTriggerTriggerArgs(
 *         schedule=gcp.dataloss.PreventionJobTriggerTriggerScheduleArgs(
 *             recurrence_period_duration="86400s",
 *         ),
 *     )],
 *     inspect_job=gcp.dataloss.PreventionJobTriggerInspectJobArgs(
 *         inspect_template_name="fake",
 *         actions=[gcp.dataloss.PreventionJobTriggerInspectJobActionArgs(
 *             save_findings=gcp.dataloss.PreventionJobTriggerInspectJobActionSaveFindingsArgs(
 *                 output_config=gcp.dataloss.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs(
 *                     table=gcp.dataloss.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs(
 *                         project_id="project",
 *                         dataset_id="dataset123",
 *                     ),
 *                 ),
 *             ),
 *         )],
 *         storage_config=gcp.dataloss.PreventionJobTriggerInspectJobStorageConfigArgs(
 *             cloud_storage_options=gcp.dataloss.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs(
 *                 file_set=gcp.dataloss.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs(
 *                     url="gs://mybucket/directory/",
 *                 ),
 *             ),
 *         ),
 *     ))
 * ```
 * ```csharp
 * using System.Collections.Generic;
 * using System.Linq;
 * using Pulumi;
 * using Gcp = Pulumi.Gcp;
 * return await Deployment.RunAsync(() =>
 * {
 *     var withTriggerId = new Gcp.DataLoss.PreventionJobTrigger("with_trigger_id", new()
 *     {
 *         Parent = "projects/my-project-name",
 *         Description = "Starting description",
 *         DisplayName = "display",
 *         TriggerId = "id-",
 *         Triggers = new[]
 *         {
 *             new Gcp.DataLoss.Inputs.PreventionJobTriggerTriggerArgs
 *             {
 *                 Schedule = new Gcp.DataLoss.Inputs.PreventionJobTriggerTriggerScheduleArgs
 *                 {
 *                     RecurrencePeriodDuration = "86400s",
 *                 },
 *             },
 *         },
 *         InspectJob = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobArgs
 *         {
 *             InspectTemplateName = "fake",
 *             Actions = new[]
 *             {
 *                 new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionArgs
 *                 {
 *                     SaveFindings = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionSaveFindingsArgs
 *                     {
 *                         OutputConfig = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs
 *                         {
 *                             Table = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs
 *                             {
 *                                 ProjectId = "project",
 *                                 DatasetId = "dataset123",
 *                             },
 *                         },
 *                     },
 *                 },
 *             },
 *             StorageConfig = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobStorageConfigArgs
 *             {
 *                 CloudStorageOptions = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs
 *                 {
 *                     FileSet = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs
 *                     {
 *                         Url = "gs://mybucket/directory/",
 *                     },
 *                 },
 *             },
 *         },
 *     });
 * });
 * ```
 * ```go
 * package main
 * import (
 * 	"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/dataloss"
 * 	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
 * )
 * func main() {
 * 	pulumi.Run(func(ctx *pulumi.Context) error {
 * 		_, err := dataloss.NewPreventionJobTrigger(ctx, "with_trigger_id", &dataloss.PreventionJobTriggerArgs{
 * 			Parent:      pulumi.String("projects/my-project-name"),
 * 			Description: pulumi.String("Starting description"),
 * 			DisplayName: pulumi.String("display"),
 * 			TriggerId:   pulumi.String("id-"),
 * 			Triggers: dataloss.PreventionJobTriggerTriggerArray{
 * 				&dataloss.PreventionJobTriggerTriggerArgs{
 * 					Schedule: &dataloss.PreventionJobTriggerTriggerScheduleArgs{
 * 						RecurrencePeriodDuration: pulumi.String("86400s"),
 * 					},
 * 				},
 * 			},
 * 			InspectJob: &dataloss.PreventionJobTriggerInspectJobArgs{
 * 				InspectTemplateName: pulumi.String("fake"),
 * 				Actions: dataloss.PreventionJobTriggerInspectJobActionArray{
 * 					&dataloss.PreventionJobTriggerInspectJobActionArgs{
 * 						SaveFindings: &dataloss.PreventionJobTriggerInspectJobActionSaveFindingsArgs{
 * 							OutputConfig: &dataloss.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs{
 * 								Table: &dataloss.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs{
 * 									ProjectId: pulumi.String("project"),
 * 									DatasetId: pulumi.String("dataset123"),
 * 								},
 * 							},
 * 						},
 * 					},
 * 				},
 * 				StorageConfig: &dataloss.PreventionJobTriggerInspectJobStorageConfigArgs{
 * 					CloudStorageOptions: &dataloss.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs{
 * 						FileSet: &dataloss.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs{
 * 							Url: pulumi.String("gs://mybucket/directory/"),
 * 						},
 * 					},
 * 				},
 * 			},
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		return nil
 * 	})
 * }
 * ```
 * ```java
 * package generated_program;
 * import com.pulumi.Context;
 * import com.pulumi.Pulumi;
 * import com.pulumi.core.Output;
 * import com.pulumi.gcp.dataloss.PreventionJobTrigger;
 * import com.pulumi.gcp.dataloss.PreventionJobTriggerArgs;
 * import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerTriggerArgs;
 * import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerTriggerScheduleArgs;
 * import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobArgs;
 * import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigArgs;
 * import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs;
 * import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs;
 * import java.util.List;
 * import java.util.ArrayList;
 * import java.util.Map;
 * import java.io.File;
 * import java.nio.file.Files;
 * import java.nio.file.Paths;
 * public class App {
 *     public static void main(String[] args) {
 *         Pulumi.run(App::stack);
 *     }
 *     public static void stack(Context ctx) {
 *         var withTriggerId = new PreventionJobTrigger("withTriggerId", PreventionJobTriggerArgs.builder()
 *             .parent("projects/my-project-name")
 *             .description("Starting description")
 *             .displayName("display")
 *             .triggerId("id-")
 *             .triggers(PreventionJobTriggerTriggerArgs.builder()
 *                 .schedule(PreventionJobTriggerTriggerScheduleArgs.builder()
 *                     .recurrencePeriodDuration("86400s")
 *                     .build())
 *                 .build())
 *             .inspectJob(PreventionJobTriggerInspectJobArgs.builder()
 *                 .inspectTemplateName("fake")
 *                 .actions(PreventionJobTriggerInspectJobActionArgs.builder()
 *                     .saveFindings(PreventionJobTriggerInspectJobActionSaveFindingsArgs.builder()
 *                         .outputConfig(PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs.builder()
 *                             .table(PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs.builder()
 *                                 .projectId("project")
 *                                 .datasetId("dataset123")
 *                                 .build())
 *                             .build())
 *                         .build())
 *                     .build())
 *                 .storageConfig(PreventionJobTriggerInspectJobStorageConfigArgs.builder()
 *                     .cloudStorageOptions(PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs.builder()
 *                         .fileSet(PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs.builder()
 *                             .url("gs://mybucket/directory/")
 *                             .build())
 *                         .build())
 *                     .build())
 *                 .build())
 *             .build());
 *     }
 * }
 * ```
 * ```yaml
 * resources:
 *   withTriggerId:
 *     type: gcp:dataloss:PreventionJobTrigger
 *     name: with_trigger_id
 *     properties:
 *       parent: projects/my-project-name
 *       description: Starting description
 *       displayName: display
 *       triggerId: id-
 *       triggers:
 *         - schedule:
 *             recurrencePeriodDuration: 86400s
 *       inspectJob:
 *         inspectTemplateName: fake
 *         actions:
 *           - saveFindings:
 *               outputConfig:
 *                 table:
 *                   projectId: project
 *                   datasetId: dataset123
 *         storageConfig:
 *           cloudStorageOptions:
 *             fileSet:
 *               url: gs://mybucket/directory/
 * ```
 * 
 * ### Dlp Job Trigger Multiple Actions
 * 
 * ```typescript
 * import * as pulumi from "@pulumi/pulumi";
 * import * as gcp from "@pulumi/gcp";
 * const basic = new gcp.dataloss.PreventionJobTrigger("basic", {
 *     parent: "projects/my-project-name",
 *     description: "Description",
 *     displayName: "Displayname",
 *     triggers: [{
 *         schedule: {
 *             recurrencePeriodDuration: "86400s",
 *         },
 *     }],
 *     inspectJob: {
 *         inspectTemplateName: "fake",
 *         actions: [
 *             {
 *                 saveFindings: {
 *                     outputConfig: {
 *                         table: {
 *                             projectId: "project",
 *                             datasetId: "dataset",
 *                         },
 *                     },
 *                 },
 *             },
 *             {
 *                 pubSub: {
 *                     topic: "projects/project/topics/topic-name",
 *                 },
 *             },
 *         ],
 *         storageConfig: {
 *             cloudStorageOptions: {
 *                 fileSet: {
 *                     url: "gs://mybucket/directory/",
 *                 },
 *             },
 *         },
 *     },
 * });
 * ```
 * ```python
 * import pulumi
 * import pulumi_gcp as gcp
 * basic = gcp.dataloss.PreventionJobTrigger("basic",
 *     parent="projects/my-project-name",
 *     description="Description",
 *     display_name="Displayname",
 *     triggers=[gcp.dataloss.PreventionJobTriggerTriggerArgs(
 *         schedule=gcp.dataloss.PreventionJobTriggerTriggerScheduleArgs(
 *             recurrence_period_duration="86400s",
 *         ),
 *     )],
 *     inspect_job=gcp.dataloss.PreventionJobTriggerInspectJobArgs(
 *         inspect_template_name="fake",
 *         actions=[
 *             gcp.dataloss.PreventionJobTriggerInspectJobActionArgs(
 *                 save_findings=gcp.dataloss.PreventionJobTriggerInspectJobActionSaveFindingsArgs(
 *                     output_config=gcp.dataloss.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs(
 *                         table=gcp.dataloss.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs(
 *                             project_id="project",
 *                             dataset_id="dataset",
 *                         ),
 *                     ),
 *                 ),
 *             ),
 *             gcp.dataloss.PreventionJobTriggerInspectJobActionArgs(
 *                 pub_sub=gcp.dataloss.PreventionJobTriggerInspectJobActionPubSubArgs(
 *                     topic="projects/project/topics/topic-name",
 *                 ),
 *             ),
 *         ],
 *         storage_config=gcp.dataloss.PreventionJobTriggerInspectJobStorageConfigArgs(
 *             cloud_storage_options=gcp.dataloss.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs(
 *                 file_set=gcp.dataloss.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs(
 *                     url="gs://mybucket/directory/",
 *                 ),
 *             ),
 *         ),
 *     ))
 * ```
 * ```csharp
 * using System.Collections.Generic;
 * using System.Linq;
 * using Pulumi;
 * using Gcp = Pulumi.Gcp;
 * return await Deployment.RunAsync(() =>
 * {
 *     var basic = new Gcp.DataLoss.PreventionJobTrigger("basic", new()
 *     {
 *         Parent = "projects/my-project-name",
 *         Description = "Description",
 *         DisplayName = "Displayname",
 *         Triggers = new[]
 *         {
 *             new Gcp.DataLoss.Inputs.PreventionJobTriggerTriggerArgs
 *             {
 *                 Schedule = new Gcp.DataLoss.Inputs.PreventionJobTriggerTriggerScheduleArgs
 *                 {
 *                     RecurrencePeriodDuration = "86400s",
 *                 },
 *             },
 *         },
 *         InspectJob = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobArgs
 *         {
 *             InspectTemplateName = "fake",
 *             Actions = new[]
 *             {
 *                 new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionArgs
 *                 {
 *                     SaveFindings = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionSaveFindingsArgs
 *                     {
 *                         OutputConfig = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs
 *                         {
 *                             Table = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs
 *                             {
 *                                 ProjectId = "project",
 *                                 DatasetId = "dataset",
 *                             },
 *                         },
 *                     },
 *                 },
 *                 new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionArgs
 *                 {
 *                     PubSub = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionPubSubArgs
 *                     {
 *                         Topic = "projects/project/topics/topic-name",
 *                     },
 *                 },
 *             },
 *             StorageConfig = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobStorageConfigArgs
 *             {
 *                 CloudStorageOptions = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs
 *                 {
 *                     FileSet = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs
 *                     {
 *                         Url = "gs://mybucket/directory/",
 *                     },
 *                 },
 *             },
 *         },
 *     });
 * });
 * ```
 * ```go
 * package main
 * import (
 * 	"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/dataloss"
 * 	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
 * )
 * func main() {
 * 	pulumi.Run(func(ctx *pulumi.Context) error {
 * 		_, err := dataloss.NewPreventionJobTrigger(ctx, "basic", &dataloss.PreventionJobTriggerArgs{
 * 			Parent:      pulumi.String("projects/my-project-name"),
 * 			Description: pulumi.String("Description"),
 * 			DisplayName: pulumi.String("Displayname"),
 * 			Triggers: dataloss.PreventionJobTriggerTriggerArray{
 * 				&dataloss.PreventionJobTriggerTriggerArgs{
 * 					Schedule: &dataloss.PreventionJobTriggerTriggerScheduleArgs{
 * 						RecurrencePeriodDuration: pulumi.String("86400s"),
 * 					},
 * 				},
 * 			},
 * 			InspectJob: &dataloss.PreventionJobTriggerInspectJobArgs{
 * 				InspectTemplateName: pulumi.String("fake"),
 * 				Actions: dataloss.PreventionJobTriggerInspectJobActionArray{
 * 					&dataloss.PreventionJobTriggerInspectJobActionArgs{
 * 						SaveFindings: &dataloss.PreventionJobTriggerInspectJobActionSaveFindingsArgs{
 * 							OutputConfig: &dataloss.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs{
 * 								Table: &dataloss.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs{
 * 									ProjectId: pulumi.String("project"),
 * 									DatasetId: pulumi.String("dataset"),
 * 								},
 * 							},
 * 						},
 * 					},
 * 					&dataloss.PreventionJobTriggerInspectJobActionArgs{
 * 						PubSub: &dataloss.PreventionJobTriggerInspectJobActionPubSubArgs{
 * 							Topic: pulumi.String("projects/project/topics/topic-name"),
 * 						},
 * 					},
 * 				},
 * 				StorageConfig: &dataloss.PreventionJobTriggerInspectJobStorageConfigArgs{
 * 					CloudStorageOptions: &dataloss.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs{
 * 						FileSet: &dataloss.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs{
 * 							Url: pulumi.String("gs://mybucket/directory/"),
 * 						},
 * 					},
 * 				},
 * 			},
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		return nil
 * 	})
 * }
 * ```
 * ```java
 * package generated_program;
 * import com.pulumi.Context;
 * import com.pulumi.Pulumi;
 * import com.pulumi.core.Output;
 * import com.pulumi.gcp.dataloss.PreventionJobTrigger;
 * import com.pulumi.gcp.dataloss.PreventionJobTriggerArgs;
 * import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerTriggerArgs;
 * import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerTriggerScheduleArgs;
 * import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobArgs;
 * import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigArgs;
 * import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs;
 * import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs;
 * import java.util.List;
 * import java.util.ArrayList;
 * import java.util.Map;
 * import java.io.File;
 * import java.nio.file.Files;
 * import java.nio.file.Paths;
 * public class App {
 *     public static void main(String[] args) {
 *         Pulumi.run(App::stack);
 *     }
 *     public static void stack(Context ctx) {
 *         var basic = new PreventionJobTrigger("basic", PreventionJobTriggerArgs.builder()
 *             .parent("projects/my-project-name")
 *             .description("Description")
 *             .displayName("Displayname")
 *             .triggers(PreventionJobTriggerTriggerArgs.builder()
 *                 .schedule(PreventionJobTriggerTriggerScheduleArgs.builder()
 *                     .recurrencePeriodDuration("86400s")
 *                     .build())
 *                 .build())
 *             .inspectJob(PreventionJobTriggerInspectJobArgs.builder()
 *                 .inspectTemplateName("fake")
 *                 .actions(
 *                     PreventionJobTriggerInspectJobActionArgs.builder()
 *                         .saveFindings(PreventionJobTriggerInspectJobActionSaveFindingsArgs.builder()
 *                             .outputConfig(PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs.builder()
 *                                 .table(PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs.builder()
 *                                     .projectId("project")
 *                                     .datasetId("dataset")
 *                                     .build())
 *                                 .build())
 *                             .build())
 *                         .build(),
 *                     PreventionJobTriggerInspectJobActionArgs.builder()
 *                         .pubSub(PreventionJobTriggerInspectJobActionPubSubArgs.builder()
 *                             .topic("projects/project/topics/topic-name")
 *                             .build())
 *                         .build())
 *                 .storageConfig(PreventionJobTriggerInspectJobStorageConfigArgs.builder()
 *                     .cloudStorageOptions(PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs.builder()
 *                         .fileSet(PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs.builder()
 *                             .url("gs://mybucket/directory/")
 *                             .build())
 *                         .build())
 *                     .build())
 *                 .build())
 *             .build());
 *     }
 * }
 * ```
 * ```yaml
 * resources:
 *   basic:
 *     type: gcp:dataloss:PreventionJobTrigger
 *     properties:
 *       parent: projects/my-project-name
 *       description: Description
 *       displayName: Displayname
 *       triggers:
 *         - schedule:
 *             recurrencePeriodDuration: 86400s
 *       inspectJob:
 *         inspectTemplateName: fake
 *         actions:
 *           - saveFindings:
 *               outputConfig:
 *                 table:
 *                   projectId: project
 *                   datasetId: dataset
 *           - pubSub:
 *               topic: projects/project/topics/topic-name
 *         storageConfig:
 *           cloudStorageOptions:
 *             fileSet:
 *               url: gs://mybucket/directory/
 * ```
 * 
 * ### Dlp Job Trigger Cloud Storage Optional Timespan Autopopulation
 * 
 * ```typescript
 * import * as pulumi from "@pulumi/pulumi";
 * import * as gcp from "@pulumi/gcp";
 * const basic = new gcp.dataloss.PreventionJobTrigger("basic", {
 *     parent: "projects/my-project-name",
 *     description: "Description",
 *     displayName: "Displayname",
 *     triggers: [{
 *         schedule: {
 *             recurrencePeriodDuration: "86400s",
 *         },
 *     }],
 *     inspectJob: {
 *         inspectTemplateName: "fake",
 *         actions: [{
 *             saveFindings: {
 *                 outputConfig: {
 *                     table: {
 *                         projectId: "project",
 *                         datasetId: "dataset",
 *                     },
 *                 },
 *             },
 *         }],
 *         storageConfig: {
 *             timespanConfig: {
 *                 enableAutoPopulationOfTimespanConfig: true,
 *             },
 *             cloudStorageOptions: {
 *                 fileSet: {
 *                     url: "gs://mybucket/directory/",
 *                 },
 *             },
 *         },
 *     },
 * });
 * ```
 * ```python
 * import pulumi
 * import pulumi_gcp as gcp
 * basic = gcp.dataloss.PreventionJobTrigger("basic",
 *     parent="projects/my-project-name",
 *     description="Description",
 *     display_name="Displayname",
 *     triggers=[gcp.dataloss.PreventionJobTriggerTriggerArgs(
 *         schedule=gcp.dataloss.PreventionJobTriggerTriggerScheduleArgs(
 *             recurrence_period_duration="86400s",
 *         ),
 *     )],
 *     inspect_job=gcp.dataloss.PreventionJobTriggerInspectJobArgs(
 *         inspect_template_name="fake",
 *         actions=[gcp.dataloss.PreventionJobTriggerInspectJobActionArgs(
 *             save_findings=gcp.dataloss.PreventionJobTriggerInspectJobActionSaveFindingsArgs(
 *                 output_config=gcp.dataloss.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs(
 *                     table=gcp.dataloss.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs(
 *                         project_id="project",
 *                         dataset_id="dataset",
 *                     ),
 *                 ),
 *             ),
 *         )],
 *         storage_config=gcp.dataloss.PreventionJobTriggerInspectJobStorageConfigArgs(
 *             timespan_config=gcp.dataloss.PreventionJobTriggerInspectJobStorageConfigTimespanConfigArgs(
 *                 enable_auto_population_of_timespan_config=True,
 *             ),
 *             cloud_storage_options=gcp.dataloss.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs(
 *                 file_set=gcp.dataloss.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs(
 *                     url="gs://mybucket/directory/",
 *                 ),
 *             ),
 *         ),
 *     ))
 * ```
 * ```csharp
 * using System.Collections.Generic;
 * using System.Linq;
 * using Pulumi;
 * using Gcp = Pulumi.Gcp;
 * return await Deployment.RunAsync(() =>
 * {
 *     var basic = new Gcp.DataLoss.PreventionJobTrigger("basic", new()
 *     {
 *         Parent = "projects/my-project-name",
 *         Description = "Description",
 *         DisplayName = "Displayname",
 *         Triggers = new[]
 *         {
 *             new Gcp.DataLoss.Inputs.PreventionJobTriggerTriggerArgs
 *             {
 *                 Schedule = new Gcp.DataLoss.Inputs.PreventionJobTriggerTriggerScheduleArgs
 *                 {
 *                     RecurrencePeriodDuration = "86400s",
 *                 },
 *             },
 *         },
 *         InspectJob = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobArgs
 *         {
 *             InspectTemplateName = "fake",
 *             Actions = new[]
 *             {
 *                 new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionArgs
 *                 {
 *                     SaveFindings = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionSaveFindingsArgs
 *                     {
 *                         OutputConfig = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs
 *                         {
 *                             Table = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs
 *                             {
 *                                 ProjectId = "project",
 *                                 DatasetId = "dataset",
 *                             },
 *                         },
 *                     },
 *                 },
 *             },
 *             StorageConfig = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobStorageConfigArgs
 *             {
 *                 TimespanConfig = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobStorageConfigTimespanConfigArgs
 *                 {
 *                     EnableAutoPopulationOfTimespanConfig = true,
 *                 },
 *                 CloudStorageOptions = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs
 *                 {
 *                     FileSet = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs
 *                     {
 *                         Url = "gs://mybucket/directory/",
 *                     },
 *                 },
 *             },
 *         },
 *     });
 * });
 * ```
 * ```go
 * package main
 * import (
 * 	"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/dataloss"
 * 	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
 * )
 * func main() {
 * 	pulumi.Run(func(ctx *pulumi.Context) error {
 * 		_, err := dataloss.NewPreventionJobTrigger(ctx, "basic", &dataloss.PreventionJobTriggerArgs{
 * 			Parent:      pulumi.String("projects/my-project-name"),
 * 			Description: pulumi.String("Description"),
 * 			DisplayName: pulumi.String("Displayname"),
 * 			Triggers: dataloss.PreventionJobTriggerTriggerArray{
 * 				&dataloss.PreventionJobTriggerTriggerArgs{
 * 					Schedule: &dataloss.PreventionJobTriggerTriggerScheduleArgs{
 * 						RecurrencePeriodDuration: pulumi.String("86400s"),
 * 					},
 * 				},
 * 			},
 * 			InspectJob: &dataloss.PreventionJobTriggerInspectJobArgs{
 * 				InspectTemplateName: pulumi.String("fake"),
 * 				Actions: dataloss.PreventionJobTriggerInspectJobActionArray{
 * 					&dataloss.PreventionJobTriggerInspectJobActionArgs{
 * 						SaveFindings: &dataloss.PreventionJobTriggerInspectJobActionSaveFindingsArgs{
 * 							OutputConfig: &dataloss.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs{
 * 								Table: &dataloss.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs{
 * 									ProjectId: pulumi.String("project"),
 * 									DatasetId: pulumi.String("dataset"),
 * 								},
 * 							},
 * 						},
 * 					},
 * 				},
 * 				StorageConfig: &dataloss.PreventionJobTriggerInspectJobStorageConfigArgs{
 * 					TimespanConfig: &dataloss.PreventionJobTriggerInspectJobStorageConfigTimespanConfigArgs{
 * 						EnableAutoPopulationOfTimespanConfig: pulumi.Bool(true),
 * 					},
 * 					CloudStorageOptions: &dataloss.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs{
 * 						FileSet: &dataloss.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs{
 * 							Url: pulumi.String("gs://mybucket/directory/"),
 * 						},
 * 					},
 * 				},
 * 			},
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		return nil
 * 	})
 * }
 * ```
 * ```java
 * package generated_program;
 * import com.pulumi.Context;
 * import com.pulumi.Pulumi;
 * import com.pulumi.core.Output;
 * import com.pulumi.gcp.dataloss.PreventionJobTrigger;
 * import com.pulumi.gcp.dataloss.PreventionJobTriggerArgs;
 * import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerTriggerArgs;
 * import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerTriggerScheduleArgs;
 * import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobArgs;
 * import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigArgs;
 * import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigTimespanConfigArgs;
 * import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs;
 * import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs;
 * import java.util.List;
 * import java.util.ArrayList;
 * import java.util.Map;
 * import java.io.File;
 * import java.nio.file.Files;
 * import java.nio.file.Paths;
 * public class App {
 *     public static void main(String[] args) {
 *         Pulumi.run(App::stack);
 *     }
 *     public static void stack(Context ctx) {
 *         var basic = new PreventionJobTrigger("basic", PreventionJobTriggerArgs.builder()
 *             .parent("projects/my-project-name")
 *             .description("Description")
 *             .displayName("Displayname")
 *             .triggers(PreventionJobTriggerTriggerArgs.builder()
 *                 .schedule(PreventionJobTriggerTriggerScheduleArgs.builder()
 *                     .recurrencePeriodDuration("86400s")
 *                     .build())
 *                 .build())
 *             .inspectJob(PreventionJobTriggerInspectJobArgs.builder()
 *                 .inspectTemplateName("fake")
 *                 .actions(PreventionJobTriggerInspectJobActionArgs.builder()
 *                     .saveFindings(PreventionJobTriggerInspectJobActionSaveFindingsArgs.builder()
 *                         .outputConfig(PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs.builder()
 *                             .table(PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs.builder()
 *                                 .projectId("project")
 *                                 .datasetId("dataset")
 *                                 .build())
 *                             .build())
 *                         .build())
 *                     .build())
 *                 .storageConfig(PreventionJobTriggerInspectJobStorageConfigArgs.builder()
 *                     .timespanConfig(PreventionJobTriggerInspectJobStorageConfigTimespanConfigArgs.builder()
 *                         .enableAutoPopulationOfTimespanConfig(true)
 *                         .build())
 *                     .cloudStorageOptions(PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs.builder()
 *                         .fileSet(PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs.builder()
 *                             .url("gs://mybucket/directory/")
 *                             .build())
 *                         .build())
 *                     .build())
 *                 .build())
 *             .build());
 *     }
 * }
 * ```
 * ```yaml
 * resources:
 *   basic:
 *     type: gcp:dataloss:PreventionJobTrigger
 *     properties:
 *       parent: projects/my-project-name
 *       description: Description
 *       displayName: Displayname
 *       triggers:
 *         - schedule:
 *             recurrencePeriodDuration: 86400s
 *       inspectJob:
 *         inspectTemplateName: fake
 *         actions:
 *           - saveFindings:
 *               outputConfig:
 *                 table:
 *                   projectId: project
 *                   datasetId: dataset
 *         storageConfig:
 *           timespanConfig:
 *             enableAutoPopulationOfTimespanConfig: true
 *           cloudStorageOptions:
 *             fileSet:
 *               url: gs://mybucket/directory/
 * ```
 * 
 * ## Import
 * JobTrigger can be imported using any of these accepted formats:
 * * `{{parent}}/jobTriggers/{{name}}`
 * * `{{parent}}/{{name}}`
 * When using the `pulumi import` command, JobTrigger can be imported using one of the formats above. For example:
 * ```sh
 * $ pulumi import gcp:dataloss/preventionJobTrigger:PreventionJobTrigger default {{parent}}/jobTriggers/{{name}}
 * ```
 * ```sh
 * $ pulumi import gcp:dataloss/preventionJobTrigger:PreventionJobTrigger default {{parent}}/{{name}}
 * ```
 * @property description A description of the job trigger.
 * @property displayName User set display name of the job trigger.
 * @property inspectJob Controls what and how to inspect for findings.
 * @property parent The parent of the trigger, either in the format `projects/{{project}}`
 * or `projects/{{project}}/locations/{{location}}`
 * @property status Whether the trigger is currently active. Default value: "HEALTHY" Possible values: ["PAUSED", "HEALTHY", "CANCELLED"]
 * @property triggerId The trigger id can contain uppercase and lowercase letters, numbers, and hyphens; that is, it must match the regular
 * expression: [a-zA-Z\d-_]+. The maximum length is 100 characters. Can be empty to allow the system to generate one.
 * @property triggers What event needs to occur for a new job to be started.
 * Structure is documented below.
 */
public data class PreventionJobTriggerArgs(
    public val description: Output<String>? = null,
    public val displayName: Output<String>? = null,
    public val inspectJob: Output<PreventionJobTriggerInspectJobArgs>? = null,
    public val parent: Output<String>? = null,
    public val status: Output<String>? = null,
    public val triggerId: Output<String>? = null,
    public val triggers: Output<List<PreventionJobTriggerTriggerArgs>>? = null,
) : ConvertibleToJava<com.pulumi.gcp.dataloss.PreventionJobTriggerArgs> {
    /**
     * Converts this Kotlin-idiomatic args wrapper into the underlying Java SDK
     * [com.pulumi.gcp.dataloss.PreventionJobTriggerArgs] by copying every set
     * property onto the Java builder. Unset (null) properties are passed through
     * as null and the Java builder leaves them unconfigured.
     */
    override fun toJava(): com.pulumi.gcp.dataloss.PreventionJobTriggerArgs =
        com.pulumi.gcp.dataloss.PreventionJobTriggerArgs.builder()
            .description(description?.applyValue({ args0 -> args0 }))
            .displayName(displayName?.applyValue({ args0 -> args0 }))
            // Nested Kotlin args objects must themselves be converted to their Java form.
            .inspectJob(inspectJob?.applyValue({ args0 -> args0.let({ args0 -> args0.toJava() }) }))
            .parent(parent?.applyValue({ args0 -> args0 }))
            .status(status?.applyValue({ args0 -> args0 }))
            .triggerId(triggerId?.applyValue({ args0 -> args0 }))
            .triggers(
                // Convert each element of the trigger list to its Java counterpart.
                triggers?.applyValue({ args0 ->
                    args0.map({ args0 ->
                        args0.let({ args0 ->
                            args0.toJava()
                        })
                    })
                }),
            ).build()
}

/**
 * Builder for [PreventionJobTriggerArgs].
 *
 * Provides a type-safe DSL for constructing [PreventionJobTriggerArgs]. Each
 * property has overloads accepting a plain value, an [Output]-wrapped value,
 * and (for nested structures) a suspending builder lambda.
 */
@PulumiTagMarker
public class PreventionJobTriggerArgsBuilder internal constructor() {
    private var description: Output<String>? = null

    private var displayName: Output<String>? = null

    private var inspectJob: Output<PreventionJobTriggerInspectJobArgs>? = null

    private var parent: Output<String>? = null

    private var status: Output<String>? = null

    private var triggerId: Output<String>? = null

    private var triggers: Output<List<PreventionJobTriggerTriggerArgs>>? = null

    /**
     * @param value A description of the job trigger.
     */
    @JvmName("eunfagvrqoedgaiq")
    public suspend fun description(`value`: Output<String>) {
        this.description = value
    }

    /**
     * @param value User set display name of the job trigger.
     */
    @JvmName("phrvxrfnbtdxlkhh")
    public suspend fun displayName(`value`: Output<String>) {
        this.displayName = value
    }

    /**
     * @param value Controls what and how to inspect for findings.
     */
    @JvmName("ngfwkxhojvanxtrj")
    public suspend fun inspectJob(`value`: Output<PreventionJobTriggerInspectJobArgs>) {
        this.inspectJob = value
    }

    /**
     * @param value The parent of the trigger, either in the format `projects/{{project}}`
     * or `projects/{{project}}/locations/{{location}}`
     */
    @JvmName("pgomdauaghdralhx")
    public suspend fun parent(`value`: Output<String>) {
        this.parent = value
    }

    /**
     * @param value Whether the trigger is currently active. Default value: "HEALTHY" Possible values: ["PAUSED", "HEALTHY", "CANCELLED"]
     */
    @JvmName("pgtvslinodyftcav")
    public suspend fun status(`value`: Output<String>) {
        this.status = value
    }

    /**
     * @param value The trigger id can contain uppercase and lowercase letters, numbers, and hyphens; that is, it must match the regular
     * expression: [a-zA-Z\d-_]+. The maximum length is 100 characters. Can be empty to allow the system to generate one.
     */
    @JvmName("cpucfhxxansigfbg")
    public suspend fun triggerId(`value`: Output<String>) {
        this.triggerId = value
    }

    /**
     * @param value What event needs to occur for a new job to be started.
     * Structure is documented below.
     */
    @JvmName("ubdgplwuunllshcc")
    public suspend fun triggers(`value`: Output<List<PreventionJobTriggerTriggerArgs>>) {
        this.triggers = value
    }

    /**
     * @param values What event needs to occur for a new job to be started.
     * Structure is documented below.
     */
    @JvmName("watfpiasqvpdxehb")
    public suspend fun triggers(vararg values: Output<PreventionJobTriggerTriggerArgs>) {
        // Output.all combines the individual Outputs into a single Output<List<...>>.
        this.triggers = Output.all(values.asList())
    }

    /**
     * @param values What event needs to occur for a new job to be started.
     * Structure is documented below.
     */
    @JvmName("wlpeoqvfxysggree")
    public suspend fun triggers(values: List<Output<PreventionJobTriggerTriggerArgs>>) {
        this.triggers = Output.all(values)
    }

    /**
     * @param value A description of the job trigger.
     */
    @JvmName("qbmhaidkccejtnax")
    public suspend fun description(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.description = mapped
    }

    /**
     * @param value User set display name of the job trigger.
     */
    @JvmName("axxqfuydbartrykp")
    public suspend fun displayName(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.displayName = mapped
    }

    /**
     * @param value Controls what and how to inspect for findings.
     */
    @JvmName("gkwhnnjqurgnyadu")
    public suspend fun inspectJob(`value`: PreventionJobTriggerInspectJobArgs?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.inspectJob = mapped
    }

    /**
     * @param argument Controls what and how to inspect for findings.
     */
    @JvmName("ilxqchrkjmjsdyyb")
    public suspend fun inspectJob(argument: suspend PreventionJobTriggerInspectJobArgsBuilder.() -> Unit) {
        // Build the nested args through its own DSL builder, then wrap in an Output.
        val toBeMapped = PreventionJobTriggerInspectJobArgsBuilder().applySuspend { argument() }.build()
        val mapped = of(toBeMapped)
        this.inspectJob = mapped
    }

    /**
     * @param value The parent of the trigger, either in the format `projects/{{project}}`
     * or `projects/{{project}}/locations/{{location}}`
     */
    @JvmName("bkhxabopctaamvmx")
    public suspend fun parent(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.parent = mapped
    }

    /**
     * @param value Whether the trigger is currently active. Default value: "HEALTHY" Possible values: ["PAUSED", "HEALTHY", "CANCELLED"]
     */
    @JvmName("spcfblbxjcbswhjf")
    public suspend fun status(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.status = mapped
    }

    /**
     * @param value The trigger id can contain uppercase and lowercase letters, numbers, and hyphens; that is, it must match the regular
     * expression: [a-zA-Z\d-_]+. The maximum length is 100 characters. Can be empty to allow the system to generate one.
     */
    @JvmName("gowhvyffrobxkcyn")
    public suspend fun triggerId(`value`: String?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.triggerId = mapped
    }

    /**
     * @param value What event needs to occur for a new job to be started.
     * Structure is documented below.
     */
    @JvmName("dnvyprdggyhduylq")
    public suspend fun triggers(`value`: List<PreventionJobTriggerTriggerArgs>?) {
        val toBeMapped = value
        val mapped = toBeMapped?.let({ args0 -> of(args0) })
        this.triggers = mapped
    }

    /**
     * @param argument What event needs to occur for a new job to be started.
     * Structure is documented below.
     */
    @JvmName("omdlyavstejgnknf")
    public suspend fun triggers(argument: List<suspend PreventionJobTriggerTriggerArgsBuilder.() -> Unit>) {
        // Run each builder lambda to produce the list elements.
        val toBeMapped = argument.toList().map {
            PreventionJobTriggerTriggerArgsBuilder().applySuspend {
                it()
            }.build()
        }
        val mapped = of(toBeMapped)
        this.triggers = mapped
    }

    /**
     * @param argument What event needs to occur for a new job to be started.
     * Structure is documented below.
     */
    @JvmName("gaqrouqfnkuaboyf")
    public suspend fun triggers(vararg argument: suspend PreventionJobTriggerTriggerArgsBuilder.() -> Unit) {
        val toBeMapped = argument.toList().map {
            PreventionJobTriggerTriggerArgsBuilder().applySuspend {
                it()
            }.build()
        }
        val mapped = of(toBeMapped)
        this.triggers = mapped
    }

    /**
     * @param argument What event needs to occur for a new job to be started.
     * Structure is documented below.
     */
    @JvmName("edisaolantvxvohh")
    public suspend fun triggers(argument: suspend PreventionJobTriggerTriggerArgsBuilder.() -> Unit) {
        // Single-element convenience overload: wraps the one built trigger in a list.
        val toBeMapped = listOf(
            PreventionJobTriggerTriggerArgsBuilder().applySuspend {
                argument()
            }.build(),
        )
        val mapped = of(toBeMapped)
        this.triggers = mapped
    }

    /**
     * @param values What event needs to occur for a new job to be started.
     * Structure is documented below.
     */
    @JvmName("neusjbvhafxmvyvw")
    public suspend fun triggers(vararg values: PreventionJobTriggerTriggerArgs) {
        val toBeMapped = values.toList()
        val mapped = toBeMapped.let({ args0 -> of(args0) })
        this.triggers = mapped
    }

    /**
     * Assembles the accumulated properties into an immutable [PreventionJobTriggerArgs].
     */
    internal fun build(): PreventionJobTriggerArgs = PreventionJobTriggerArgs(
        description = description,
        displayName = displayName,
        inspectJob = inspectJob,
        parent = parent,
        status = status,
        triggerId = triggerId,
        triggers = triggers,
    )
}




© 2015 - 2024 Weber Informatics LLC | Privacy Policy