All Downloads are FREE. Search and download functionalities are using the official Maven repository.

com.pulumi.gcp.healthcare.kotlin.PipelineJob.kt Maven / Gradle / Ivy

Go to download

Build cloud applications and infrastructure by combining the safety and reliability of infrastructure as code with the power of the Kotlin programming language.

There is a newer version: 8.20.1.0
Show newest version
@file:Suppress("NAME_SHADOWING", "DEPRECATION")

package com.pulumi.gcp.healthcare.kotlin

import com.pulumi.core.Output
import com.pulumi.gcp.healthcare.kotlin.outputs.PipelineJobBackfillPipelineJob
import com.pulumi.gcp.healthcare.kotlin.outputs.PipelineJobMappingPipelineJob
import com.pulumi.gcp.healthcare.kotlin.outputs.PipelineJobReconciliationPipelineJob
import com.pulumi.kotlin.KotlinCustomResource
import com.pulumi.kotlin.PulumiTagMarker
import com.pulumi.kotlin.ResourceMapper
import com.pulumi.kotlin.options.CustomResourceOptions
import com.pulumi.kotlin.options.CustomResourceOptionsBuilder
import com.pulumi.resources.Resource
import kotlin.Boolean
import kotlin.String
import kotlin.Suppress
import kotlin.Unit
import kotlin.collections.Map
import com.pulumi.gcp.healthcare.kotlin.outputs.PipelineJobBackfillPipelineJob.Companion.toKotlin as pipelineJobBackfillPipelineJobToKotlin
import com.pulumi.gcp.healthcare.kotlin.outputs.PipelineJobMappingPipelineJob.Companion.toKotlin as pipelineJobMappingPipelineJobToKotlin
import com.pulumi.gcp.healthcare.kotlin.outputs.PipelineJobReconciliationPipelineJob.Companion.toKotlin as pipelineJobReconciliationPipelineJobToKotlin

/**
 * Type-safe builder used by the Pulumi Kotlin DSL to assemble a [PipelineJob]
 * resource from a resource name, a [PipelineJobArgs] payload, and a set of
 * [CustomResourceOptions].
 */
@PulumiTagMarker
public class PipelineJobResourceBuilder internal constructor() {
    /** The unique name of the resulting resource. */
    public var name: String? = null

    /** The arguments used to populate this resource's properties. */
    public var args: PipelineJobArgs = PipelineJobArgs()

    /** Options that control this resource's behavior. */
    public var opts: CustomResourceOptions = CustomResourceOptions()

    /**
     * @param name The _unique_ name of the resulting resource.
     */
    public fun name(`value`: String) {
        name = value
    }

    /**
     * @param block The arguments to use to populate this resource's properties.
     */
    public suspend fun args(block: suspend PipelineJobArgsBuilder.() -> Unit) {
        args = PipelineJobArgsBuilder().also { block(it) }.build()
    }

    /**
     * @param block A bag of options that control this resource's behavior.
     */
    public suspend fun opts(block: suspend CustomResourceOptionsBuilder.() -> Unit) {
        opts = CustomResourceOptions.opts(block)
    }

    // Materializes the underlying Java resource and wraps it in the Kotlin facade.
    internal fun build(): PipelineJob = PipelineJob(
        com.pulumi.gcp.healthcare.PipelineJob(
            name,
            args.toJava(),
            opts.toJava(),
        ),
    )
}

/**
 * PipelineJobs are Long Running Operations on Healthcare API to Map or Reconcile
 * incoming data into FHIR format
 * To get more information about PipelineJob, see:
 * * [API documentation](https://cloud.google.com/healthcare-api/healthcare-data-engine/docs/reference/rest/v1/projects.locations.datasets.pipelineJobs)
 * * How-to Guides
 *     * [Creating a PipelineJob](https://cloud.google.com/healthcare-api/private/healthcare-data-engine/docs/reference/rest/v1/projects.locations.datasets.pipelineJobs#PipelineJob)
 * ## Example Usage
 * ### Healthcare Pipeline Job Reconciliation
 * 
 * ```typescript
 * import * as pulumi from "@pulumi/pulumi";
 * import * as gcp from "@pulumi/gcp";
 * const project = gcp.organizations.getProject({});
 * const dataset = new gcp.healthcare.Dataset("dataset", {
 *     name: "example_dataset",
 *     location: "us-central1",
 * });
 * const fhirstore = new gcp.healthcare.FhirStore("fhirstore", {
 *     name: "fhir_store",
 *     dataset: dataset.id,
 *     version: "R4",
 *     enableUpdateCreate: true,
 *     disableReferentialIntegrity: true,
 * });
 * const bucket = new gcp.storage.Bucket("bucket", {
 *     name: "example_bucket_name",
 *     location: "us-central1",
 *     uniformBucketLevelAccess: true,
 * });
 * const mergeFile = new gcp.storage.BucketObject("merge_file", {
 *     name: "merge.wstl",
 *     content: " ",
 *     bucket: bucket.name,
 * });
 * const example_pipeline = new gcp.healthcare.PipelineJob("example-pipeline", {
 *     name: "example_pipeline_job",
 *     location: "us-central1",
 *     dataset: dataset.id,
 *     disableLineage: true,
 *     reconciliationPipelineJob: {
 *         mergeConfig: {
 *             description: "sample description for reconciliation rules",
 *             whistleConfigSource: {
 *                 uri: pulumi.interpolate`gs://${bucket.name}/${mergeFile.name}`,
 *                 importUriPrefix: pulumi.interpolate`gs://${bucket.name}`,
 *             },
 *         },
 *         matchingUriPrefix: pulumi.interpolate`gs://${bucket.name}`,
 *         fhirStoreDestination: pulumi.interpolate`${dataset.id}/fhirStores/${fhirstore.name}`,
 *     },
 * });
 * const hsa = new gcp.storage.BucketIAMMember("hsa", {
 *     bucket: bucket.name,
 *     role: "roles/storage.objectUser",
 *     member: project.then(project => `serviceAccount:service-${project.number}@gcp-sa-healthcare.iam.gserviceaccount.com`),
 * });
 * ```
 * ```python
 * import pulumi
 * import pulumi_gcp as gcp
 * project = gcp.organizations.get_project()
 * dataset = gcp.healthcare.Dataset("dataset",
 *     name="example_dataset",
 *     location="us-central1")
 * fhirstore = gcp.healthcare.FhirStore("fhirstore",
 *     name="fhir_store",
 *     dataset=dataset.id,
 *     version="R4",
 *     enable_update_create=True,
 *     disable_referential_integrity=True)
 * bucket = gcp.storage.Bucket("bucket",
 *     name="example_bucket_name",
 *     location="us-central1",
 *     uniform_bucket_level_access=True)
 * merge_file = gcp.storage.BucketObject("merge_file",
 *     name="merge.wstl",
 *     content=" ",
 *     bucket=bucket.name)
 * example_pipeline = gcp.healthcare.PipelineJob("example-pipeline",
 *     name="example_pipeline_job",
 *     location="us-central1",
 *     dataset=dataset.id,
 *     disable_lineage=True,
 *     reconciliation_pipeline_job={
 *         "merge_config": {
 *             "description": "sample description for reconciliation rules",
 *             "whistle_config_source": {
 *                 "uri": pulumi.Output.all(
 *                     bucketName=bucket.name,
 *                     mergeFileName=merge_file.name
 * ).apply(lambda resolved_outputs: f"gs://{resolved_outputs['bucketName']}/{resolved_outputs['mergeFileName']}")
 * ,
 *                 "import_uri_prefix": bucket.name.apply(lambda name: f"gs://{name}"),
 *             },
 *         },
 *         "matching_uri_prefix": bucket.name.apply(lambda name: f"gs://{name}"),
 *         "fhir_store_destination": pulumi.Output.all(
 *             id=dataset.id,
 *             name=fhirstore.name
 * ).apply(lambda resolved_outputs: f"{resolved_outputs['id']}/fhirStores/{resolved_outputs['name']}")
 * ,
 *     })
 * hsa = gcp.storage.BucketIAMMember("hsa",
 *     bucket=bucket.name,
 *     role="roles/storage.objectUser",
 *     member=f"serviceAccount:service-{project.number}@gcp-sa-healthcare.iam.gserviceaccount.com")
 * ```
 * ```csharp
 * using System.Collections.Generic;
 * using System.Linq;
 * using Pulumi;
 * using Gcp = Pulumi.Gcp;
 * return await Deployment.RunAsync(() =>
 * {
 *     var project = Gcp.Organizations.GetProject.Invoke();
 *     var dataset = new Gcp.Healthcare.Dataset("dataset", new()
 *     {
 *         Name = "example_dataset",
 *         Location = "us-central1",
 *     });
 *     var fhirstore = new Gcp.Healthcare.FhirStore("fhirstore", new()
 *     {
 *         Name = "fhir_store",
 *         Dataset = dataset.Id,
 *         Version = "R4",
 *         EnableUpdateCreate = true,
 *         DisableReferentialIntegrity = true,
 *     });
 *     var bucket = new Gcp.Storage.Bucket("bucket", new()
 *     {
 *         Name = "example_bucket_name",
 *         Location = "us-central1",
 *         UniformBucketLevelAccess = true,
 *     });
 *     var mergeFile = new Gcp.Storage.BucketObject("merge_file", new()
 *     {
 *         Name = "merge.wstl",
 *         Content = " ",
 *         Bucket = bucket.Name,
 *     });
 *     var example_pipeline = new Gcp.Healthcare.PipelineJob("example-pipeline", new()
 *     {
 *         Name = "example_pipeline_job",
 *         Location = "us-central1",
 *         Dataset = dataset.Id,
 *         DisableLineage = true,
 *         ReconciliationPipelineJob = new Gcp.Healthcare.Inputs.PipelineJobReconciliationPipelineJobArgs
 *         {
 *             MergeConfig = new Gcp.Healthcare.Inputs.PipelineJobReconciliationPipelineJobMergeConfigArgs
 *             {
 *                 Description = "sample description for reconciliation rules",
 *                 WhistleConfigSource = new Gcp.Healthcare.Inputs.PipelineJobReconciliationPipelineJobMergeConfigWhistleConfigSourceArgs
 *                 {
 *                     Uri = Output.Tuple(bucket.Name, mergeFile.Name).Apply(values =>
 *                     {
 *                         var bucketName = values.Item1;
 *                         var mergeFileName = values.Item2;
 *                         return $"gs://{bucketName}/{mergeFileName}";
 *                     }),
 *                     ImportUriPrefix = bucket.Name.Apply(name => $"gs://{name}"),
 *                 },
 *             },
 *             MatchingUriPrefix = bucket.Name.Apply(name => $"gs://{name}"),
 *             FhirStoreDestination = Output.Tuple(dataset.Id, fhirstore.Name).Apply(values =>
 *             {
 *                 var id = values.Item1;
 *                 var name = values.Item2;
 *                 return $"{id}/fhirStores/{name}";
 *             }),
 *         },
 *     });
 *     var hsa = new Gcp.Storage.BucketIAMMember("hsa", new()
 *     {
 *         Bucket = bucket.Name,
 *         Role = "roles/storage.objectUser",
 *         Member = $"serviceAccount:service-{project.Apply(getProjectResult => getProjectResult.Number)}@gcp-sa-healthcare.iam.gserviceaccount.com",
 *     });
 * });
 * ```
 * ```go
 * package main
 * import (
 * 	"fmt"
 * 	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/healthcare"
 * 	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/organizations"
 * 	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/storage"
 * 	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
 * )
 * func main() {
 * 	pulumi.Run(func(ctx *pulumi.Context) error {
 * 		project, err := organizations.LookupProject(ctx, &organizations.LookupProjectArgs{}, nil)
 * 		if err != nil {
 * 			return err
 * 		}
 * 		dataset, err := healthcare.NewDataset(ctx, "dataset", &healthcare.DatasetArgs{
 * 			Name:     pulumi.String("example_dataset"),
 * 			Location: pulumi.String("us-central1"),
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		fhirstore, err := healthcare.NewFhirStore(ctx, "fhirstore", &healthcare.FhirStoreArgs{
 * 			Name:                        pulumi.String("fhir_store"),
 * 			Dataset:                     dataset.ID(),
 * 			Version:                     pulumi.String("R4"),
 * 			EnableUpdateCreate:          pulumi.Bool(true),
 * 			DisableReferentialIntegrity: pulumi.Bool(true),
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		bucket, err := storage.NewBucket(ctx, "bucket", &storage.BucketArgs{
 * 			Name:                     pulumi.String("example_bucket_name"),
 * 			Location:                 pulumi.String("us-central1"),
 * 			UniformBucketLevelAccess: pulumi.Bool(true),
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		mergeFile, err := storage.NewBucketObject(ctx, "merge_file", &storage.BucketObjectArgs{
 * 			Name:    pulumi.String("merge.wstl"),
 * 			Content: pulumi.String(" "),
 * 			Bucket:  bucket.Name,
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		_, err = healthcare.NewPipelineJob(ctx, "example-pipeline", &healthcare.PipelineJobArgs{
 * 			Name:           pulumi.String("example_pipeline_job"),
 * 			Location:       pulumi.String("us-central1"),
 * 			Dataset:        dataset.ID(),
 * 			DisableLineage: pulumi.Bool(true),
 * 			ReconciliationPipelineJob: &healthcare.PipelineJobReconciliationPipelineJobArgs{
 * 				MergeConfig: &healthcare.PipelineJobReconciliationPipelineJobMergeConfigArgs{
 * 					Description: pulumi.String("sample description for reconciliation rules"),
 * 					WhistleConfigSource: &healthcare.PipelineJobReconciliationPipelineJobMergeConfigWhistleConfigSourceArgs{
 * 						Uri: pulumi.All(bucket.Name, mergeFile.Name).ApplyT(func(_args []interface{}) (string, error) {
 * 							bucketName := _args[0].(string)
 * 							mergeFileName := _args[1].(string)
 * 							return fmt.Sprintf("gs://%v/%v", bucketName, mergeFileName), nil
 * 						}).(pulumi.StringOutput),
 * 						ImportUriPrefix: bucket.Name.ApplyT(func(name string) (string, error) {
 * 							return fmt.Sprintf("gs://%v", name), nil
 * 						}).(pulumi.StringOutput),
 * 					},
 * 				},
 * 				MatchingUriPrefix: bucket.Name.ApplyT(func(name string) (string, error) {
 * 					return fmt.Sprintf("gs://%v", name), nil
 * 				}).(pulumi.StringOutput),
 * 				FhirStoreDestination: pulumi.All(dataset.ID(), fhirstore.Name).ApplyT(func(_args []interface{}) (string, error) {
 * 					id := _args[0].(string)
 * 					name := _args[1].(string)
 * 					return fmt.Sprintf("%v/fhirStores/%v", id, name), nil
 * 				}).(pulumi.StringOutput),
 * 			},
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		_, err = storage.NewBucketIAMMember(ctx, "hsa", &storage.BucketIAMMemberArgs{
 * 			Bucket: bucket.Name,
 * 			Role:   pulumi.String("roles/storage.objectUser"),
 * 			Member: pulumi.Sprintf("serviceAccount:service-%v@gcp-sa-healthcare.iam.gserviceaccount.com", project.Number),
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		return nil
 * 	})
 * }
 * ```
 * ```java
 * package generated_program;
 * import com.pulumi.Context;
 * import com.pulumi.Pulumi;
 * import com.pulumi.core.Output;
 * import com.pulumi.gcp.organizations.OrganizationsFunctions;
 * import com.pulumi.gcp.organizations.inputs.GetProjectArgs;
 * import com.pulumi.gcp.healthcare.Dataset;
 * import com.pulumi.gcp.healthcare.DatasetArgs;
 * import com.pulumi.gcp.healthcare.FhirStore;
 * import com.pulumi.gcp.healthcare.FhirStoreArgs;
 * import com.pulumi.gcp.storage.Bucket;
 * import com.pulumi.gcp.storage.BucketArgs;
 * import com.pulumi.gcp.storage.BucketObject;
 * import com.pulumi.gcp.storage.BucketObjectArgs;
 * import com.pulumi.gcp.healthcare.PipelineJob;
 * import com.pulumi.gcp.healthcare.PipelineJobArgs;
 * import com.pulumi.gcp.healthcare.inputs.PipelineJobReconciliationPipelineJobArgs;
 * import com.pulumi.gcp.healthcare.inputs.PipelineJobReconciliationPipelineJobMergeConfigArgs;
 * import com.pulumi.gcp.healthcare.inputs.PipelineJobReconciliationPipelineJobMergeConfigWhistleConfigSourceArgs;
 * import com.pulumi.gcp.storage.BucketIAMMember;
 * import com.pulumi.gcp.storage.BucketIAMMemberArgs;
 * import java.util.List;
 * import java.util.ArrayList;
 * import java.util.Map;
 * import java.io.File;
 * import java.nio.file.Files;
 * import java.nio.file.Paths;
 * public class App {
 *     public static void main(String[] args) {
 *         Pulumi.run(App::stack);
 *     }
 *     public static void stack(Context ctx) {
 *         final var project = OrganizationsFunctions.getProject();
 *         var dataset = new Dataset("dataset", DatasetArgs.builder()
 *             .name("example_dataset")
 *             .location("us-central1")
 *             .build());
 *         var fhirstore = new FhirStore("fhirstore", FhirStoreArgs.builder()
 *             .name("fhir_store")
 *             .dataset(dataset.id())
 *             .version("R4")
 *             .enableUpdateCreate(true)
 *             .disableReferentialIntegrity(true)
 *             .build());
 *         var bucket = new Bucket("bucket", BucketArgs.builder()
 *             .name("example_bucket_name")
 *             .location("us-central1")
 *             .uniformBucketLevelAccess(true)
 *             .build());
 *         var mergeFile = new BucketObject("mergeFile", BucketObjectArgs.builder()
 *             .name("merge.wstl")
 *             .content(" ")
 *             .bucket(bucket.name())
 *             .build());
 *         var example_pipeline = new PipelineJob("example-pipeline", PipelineJobArgs.builder()
 *             .name("example_pipeline_job")
 *             .location("us-central1")
 *             .dataset(dataset.id())
 *             .disableLineage(true)
 *             .reconciliationPipelineJob(PipelineJobReconciliationPipelineJobArgs.builder()
 *                 .mergeConfig(PipelineJobReconciliationPipelineJobMergeConfigArgs.builder()
 *                     .description("sample description for reconciliation rules")
 *                     .whistleConfigSource(PipelineJobReconciliationPipelineJobMergeConfigWhistleConfigSourceArgs.builder()
 *                         .uri(Output.tuple(bucket.name(), mergeFile.name()).applyValue(values -> {
 *                             var bucketName = values.t1;
 *                             var mergeFileName = values.t2;
 *                             return String.format("gs://%s/%s", bucketName,mergeFileName);
 *                         }))
 *                         .importUriPrefix(bucket.name().applyValue(name -> String.format("gs://%s", name)))
 *                         .build())
 *                     .build())
 *                 .matchingUriPrefix(bucket.name().applyValue(name -> String.format("gs://%s", name)))
 *                 .fhirStoreDestination(Output.tuple(dataset.id(), fhirstore.name()).applyValue(values -> {
 *                     var id = values.t1;
 *                     var name = values.t2;
 *                     return String.format("%s/fhirStores/%s", id,name);
 *                 }))
 *                 .build())
 *             .build());
 *         var hsa = new BucketIAMMember("hsa", BucketIAMMemberArgs.builder()
 *             .bucket(bucket.name())
 *             .role("roles/storage.objectUser")
 *             .member(String.format("serviceAccount:service-%s@gcp-sa-healthcare.iam.gserviceaccount.com", project.applyValue(getProjectResult -> getProjectResult.number())))
 *             .build());
 *     }
 * }
 * ```
 * ```yaml
 * resources:
 *   example-pipeline:
 *     type: gcp:healthcare:PipelineJob
 *     properties:
 *       name: example_pipeline_job
 *       location: us-central1
 *       dataset: ${dataset.id}
 *       disableLineage: true
 *       reconciliationPipelineJob:
 *         mergeConfig:
 *           description: sample description for reconciliation rules
 *           whistleConfigSource:
 *             uri: gs://${bucket.name}/${mergeFile.name}
 *             importUriPrefix: gs://${bucket.name}
 *         matchingUriPrefix: gs://${bucket.name}
 *         fhirStoreDestination: ${dataset.id}/fhirStores/${fhirstore.name}
 *   dataset:
 *     type: gcp:healthcare:Dataset
 *     properties:
 *       name: example_dataset
 *       location: us-central1
 *   fhirstore:
 *     type: gcp:healthcare:FhirStore
 *     properties:
 *       name: fhir_store
 *       dataset: ${dataset.id}
 *       version: R4
 *       enableUpdateCreate: true
 *       disableReferentialIntegrity: true
 *   bucket:
 *     type: gcp:storage:Bucket
 *     properties:
 *       name: example_bucket_name
 *       location: us-central1
 *       uniformBucketLevelAccess: true
 *   mergeFile:
 *     type: gcp:storage:BucketObject
 *     name: merge_file
 *     properties:
 *       name: merge.wstl
 *       content: ' '
 *       bucket: ${bucket.name}
 *   hsa:
 *     type: gcp:storage:BucketIAMMember
 *     properties:
 *       bucket: ${bucket.name}
 *       role: roles/storage.objectUser
 *       member: serviceAccount:service-${project.number}@gcp-sa-healthcare.iam.gserviceaccount.com
 * variables:
 *   project:
 *     fn::invoke:
 *       function: gcp:organizations:getProject
 *       arguments: {}
 * ```
 * 
 * ### Healthcare Pipeline Job Backfill
 * 
 * ```typescript
 * import * as pulumi from "@pulumi/pulumi";
 * import * as gcp from "@pulumi/gcp";
 * const dataset = new gcp.healthcare.Dataset("dataset", {
 *     name: "example_dataset",
 *     location: "us-central1",
 * });
 * const example_pipeline = new gcp.healthcare.PipelineJob("example-pipeline", {
 *     name: "example_backfill_pipeline",
 *     location: "us-central1",
 *     dataset: dataset.id,
 *     backfillPipelineJob: {
 *         mappingPipelineJob: pulumi.interpolate`${dataset.id}/pipelinejobs/example_mapping_pipeline`,
 *     },
 * });
 * ```
 * ```python
 * import pulumi
 * import pulumi_gcp as gcp
 * dataset = gcp.healthcare.Dataset("dataset",
 *     name="example_dataset",
 *     location="us-central1")
 * example_pipeline = gcp.healthcare.PipelineJob("example-pipeline",
 *     name="example_backfill_pipeline",
 *     location="us-central1",
 *     dataset=dataset.id,
 *     backfill_pipeline_job={
 *         "mapping_pipeline_job": dataset.id.apply(lambda id: f"{id}/pipelinejobs/example_mapping_pipeline"),
 *     })
 * ```
 * ```csharp
 * using System.Collections.Generic;
 * using System.Linq;
 * using Pulumi;
 * using Gcp = Pulumi.Gcp;
 * return await Deployment.RunAsync(() =>
 * {
 *     var dataset = new Gcp.Healthcare.Dataset("dataset", new()
 *     {
 *         Name = "example_dataset",
 *         Location = "us-central1",
 *     });
 *     var example_pipeline = new Gcp.Healthcare.PipelineJob("example-pipeline", new()
 *     {
 *         Name = "example_backfill_pipeline",
 *         Location = "us-central1",
 *         Dataset = dataset.Id,
 *         BackfillPipelineJob = new Gcp.Healthcare.Inputs.PipelineJobBackfillPipelineJobArgs
 *         {
 *             MappingPipelineJob = dataset.Id.Apply(id => $"{id}/pipelinejobs/example_mapping_pipeline"),
 *         },
 *     });
 * });
 * ```
 * ```go
 * package main
 * import (
 * 	"fmt"
 * 	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/healthcare"
 * 	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
 * )
 * func main() {
 * 	pulumi.Run(func(ctx *pulumi.Context) error {
 * 		dataset, err := healthcare.NewDataset(ctx, "dataset", &healthcare.DatasetArgs{
 * 			Name:     pulumi.String("example_dataset"),
 * 			Location: pulumi.String("us-central1"),
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		_, err = healthcare.NewPipelineJob(ctx, "example-pipeline", &healthcare.PipelineJobArgs{
 * 			Name:     pulumi.String("example_backfill_pipeline"),
 * 			Location: pulumi.String("us-central1"),
 * 			Dataset:  dataset.ID(),
 * 			BackfillPipelineJob: &healthcare.PipelineJobBackfillPipelineJobArgs{
 * 				MappingPipelineJob: dataset.ID().ApplyT(func(id string) (string, error) {
 * 					return fmt.Sprintf("%v/pipelinejobs/example_mapping_pipeline", id), nil
 * 				}).(pulumi.StringOutput),
 * 			},
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		return nil
 * 	})
 * }
 * ```
 * ```java
 * package generated_program;
 * import com.pulumi.Context;
 * import com.pulumi.Pulumi;
 * import com.pulumi.core.Output;
 * import com.pulumi.gcp.healthcare.Dataset;
 * import com.pulumi.gcp.healthcare.DatasetArgs;
 * import com.pulumi.gcp.healthcare.PipelineJob;
 * import com.pulumi.gcp.healthcare.PipelineJobArgs;
 * import com.pulumi.gcp.healthcare.inputs.PipelineJobBackfillPipelineJobArgs;
 * import java.util.List;
 * import java.util.ArrayList;
 * import java.util.Map;
 * import java.io.File;
 * import java.nio.file.Files;
 * import java.nio.file.Paths;
 * public class App {
 *     public static void main(String[] args) {
 *         Pulumi.run(App::stack);
 *     }
 *     public static void stack(Context ctx) {
 *         var dataset = new Dataset("dataset", DatasetArgs.builder()
 *             .name("example_dataset")
 *             .location("us-central1")
 *             .build());
 *         var example_pipeline = new PipelineJob("example-pipeline", PipelineJobArgs.builder()
 *             .name("example_backfill_pipeline")
 *             .location("us-central1")
 *             .dataset(dataset.id())
 *             .backfillPipelineJob(PipelineJobBackfillPipelineJobArgs.builder()
 *                 .mappingPipelineJob(dataset.id().applyValue(id -> String.format("%s/pipelinejobs/example_mapping_pipeline", id)))
 *                 .build())
 *             .build());
 *     }
 * }
 * ```
 * ```yaml
 * resources:
 *   example-pipeline:
 *     type: gcp:healthcare:PipelineJob
 *     properties:
 *       name: example_backfill_pipeline
 *       location: us-central1
 *       dataset: ${dataset.id}
 *       backfillPipelineJob:
 *         mappingPipelineJob: ${dataset.id}/pipelinejobs/example_mapping_pipeline
 *   dataset:
 *     type: gcp:healthcare:Dataset
 *     properties:
 *       name: example_dataset
 *       location: us-central1
 * ```
 * 
 * ### Healthcare Pipeline Job Whistle Mapping
 * 
 * ```typescript
 * import * as pulumi from "@pulumi/pulumi";
 * import * as gcp from "@pulumi/gcp";
 * const project = gcp.organizations.getProject({});
 * const dataset = new gcp.healthcare.Dataset("dataset", {
 *     name: "example_dataset",
 *     location: "us-central1",
 * });
 * const sourceFhirstore = new gcp.healthcare.FhirStore("source_fhirstore", {
 *     name: "source_fhir_store",
 *     dataset: dataset.id,
 *     version: "R4",
 *     enableUpdateCreate: true,
 *     disableReferentialIntegrity: true,
 * });
 * const destFhirstore = new gcp.healthcare.FhirStore("dest_fhirstore", {
 *     name: "dest_fhir_store",
 *     dataset: dataset.id,
 *     version: "R4",
 *     enableUpdateCreate: true,
 *     disableReferentialIntegrity: true,
 * });
 * const bucket = new gcp.storage.Bucket("bucket", {
 *     name: "example_bucket_name",
 *     location: "us-central1",
 *     uniformBucketLevelAccess: true,
 * });
 * const mappingFile = new gcp.storage.BucketObject("mapping_file", {
 *     name: "mapping.wstl",
 *     content: " ",
 *     bucket: bucket.name,
 * });
 * const example_mapping_pipeline = new gcp.healthcare.PipelineJob("example-mapping-pipeline", {
 *     name: "example_mapping_pipeline_job",
 *     location: "us-central1",
 *     dataset: dataset.id,
 *     disableLineage: true,
 *     labels: {
 *         example_label_key: "example_label_value",
 *     },
 *     mappingPipelineJob: {
 *         mappingConfig: {
 *             whistleConfigSource: {
 *                 uri: pulumi.interpolate`gs://${bucket.name}/${mappingFile.name}`,
 *                 importUriPrefix: pulumi.interpolate`gs://${bucket.name}`,
 *             },
 *             description: "example description for mapping configuration",
 *         },
 *         fhirStreamingSource: {
 *             fhirStore: pulumi.interpolate`${dataset.id}/fhirStores/${sourceFhirstore.name}`,
 *             description: "example description for streaming fhirstore",
 *         },
 *         fhirStoreDestination: pulumi.interpolate`${dataset.id}/fhirStores/${destFhirstore.name}`,
 *     },
 * });
 * const hsa = new gcp.storage.BucketIAMMember("hsa", {
 *     bucket: bucket.name,
 *     role: "roles/storage.objectUser",
 *     member: project.then(project => `serviceAccount:service-${project.number}@gcp-sa-healthcare.iam.gserviceaccount.com`),
 * });
 * ```
 * ```python
 * import pulumi
 * import pulumi_gcp as gcp
 * project = gcp.organizations.get_project()
 * dataset = gcp.healthcare.Dataset("dataset",
 *     name="example_dataset",
 *     location="us-central1")
 * source_fhirstore = gcp.healthcare.FhirStore("source_fhirstore",
 *     name="source_fhir_store",
 *     dataset=dataset.id,
 *     version="R4",
 *     enable_update_create=True,
 *     disable_referential_integrity=True)
 * dest_fhirstore = gcp.healthcare.FhirStore("dest_fhirstore",
 *     name="dest_fhir_store",
 *     dataset=dataset.id,
 *     version="R4",
 *     enable_update_create=True,
 *     disable_referential_integrity=True)
 * bucket = gcp.storage.Bucket("bucket",
 *     name="example_bucket_name",
 *     location="us-central1",
 *     uniform_bucket_level_access=True)
 * mapping_file = gcp.storage.BucketObject("mapping_file",
 *     name="mapping.wstl",
 *     content=" ",
 *     bucket=bucket.name)
 * example_mapping_pipeline = gcp.healthcare.PipelineJob("example-mapping-pipeline",
 *     name="example_mapping_pipeline_job",
 *     location="us-central1",
 *     dataset=dataset.id,
 *     disable_lineage=True,
 *     labels={
 *         "example_label_key": "example_label_value",
 *     },
 *     mapping_pipeline_job={
 *         "mapping_config": {
 *             "whistle_config_source": {
 *                 "uri": pulumi.Output.all(
 *                     bucketName=bucket.name,
 *                     mappingFileName=mapping_file.name
 * ).apply(lambda resolved_outputs: f"gs://{resolved_outputs['bucketName']}/{resolved_outputs['mappingFileName']}")
 * ,
 *                 "import_uri_prefix": bucket.name.apply(lambda name: f"gs://{name}"),
 *             },
 *             "description": "example description for mapping configuration",
 *         },
 *         "fhir_streaming_source": {
 *             "fhir_store": pulumi.Output.all(
 *                 id=dataset.id,
 *                 name=source_fhirstore.name
 * ).apply(lambda resolved_outputs: f"{resolved_outputs['id']}/fhirStores/{resolved_outputs['name']}")
 * ,
 *             "description": "example description for streaming fhirstore",
 *         },
 *         "fhir_store_destination": pulumi.Output.all(
 *             id=dataset.id,
 *             name=dest_fhirstore.name
 * ).apply(lambda resolved_outputs: f"{resolved_outputs['id']}/fhirStores/{resolved_outputs['name']}")
 * ,
 *     })
 * hsa = gcp.storage.BucketIAMMember("hsa",
 *     bucket=bucket.name,
 *     role="roles/storage.objectUser",
 *     member=f"serviceAccount:service-{project.number}@gcp-sa-healthcare.iam.gserviceaccount.com")
 * ```
 * ```csharp
 * using System.Collections.Generic;
 * using System.Linq;
 * using Pulumi;
 * using Gcp = Pulumi.Gcp;
 * return await Deployment.RunAsync(() =>
 * {
 *     var project = Gcp.Organizations.GetProject.Invoke();
 *     var dataset = new Gcp.Healthcare.Dataset("dataset", new()
 *     {
 *         Name = "example_dataset",
 *         Location = "us-central1",
 *     });
 *     var sourceFhirstore = new Gcp.Healthcare.FhirStore("source_fhirstore", new()
 *     {
 *         Name = "source_fhir_store",
 *         Dataset = dataset.Id,
 *         Version = "R4",
 *         EnableUpdateCreate = true,
 *         DisableReferentialIntegrity = true,
 *     });
 *     var destFhirstore = new Gcp.Healthcare.FhirStore("dest_fhirstore", new()
 *     {
 *         Name = "dest_fhir_store",
 *         Dataset = dataset.Id,
 *         Version = "R4",
 *         EnableUpdateCreate = true,
 *         DisableReferentialIntegrity = true,
 *     });
 *     var bucket = new Gcp.Storage.Bucket("bucket", new()
 *     {
 *         Name = "example_bucket_name",
 *         Location = "us-central1",
 *         UniformBucketLevelAccess = true,
 *     });
 *     var mappingFile = new Gcp.Storage.BucketObject("mapping_file", new()
 *     {
 *         Name = "mapping.wstl",
 *         Content = " ",
 *         Bucket = bucket.Name,
 *     });
 *     var example_mapping_pipeline = new Gcp.Healthcare.PipelineJob("example-mapping-pipeline", new()
 *     {
 *         Name = "example_mapping_pipeline_job",
 *         Location = "us-central1",
 *         Dataset = dataset.Id,
 *         DisableLineage = true,
 *         Labels =
 *         {
 *             { "example_label_key", "example_label_value" },
 *         },
 *         MappingPipelineJob = new Gcp.Healthcare.Inputs.PipelineJobMappingPipelineJobArgs
 *         {
 *             MappingConfig = new Gcp.Healthcare.Inputs.PipelineJobMappingPipelineJobMappingConfigArgs
 *             {
 *                 WhistleConfigSource = new Gcp.Healthcare.Inputs.PipelineJobMappingPipelineJobMappingConfigWhistleConfigSourceArgs
 *                 {
 *                     Uri = Output.Tuple(bucket.Name, mappingFile.Name).Apply(values =>
 *                     {
 *                         var bucketName = values.Item1;
 *                         var mappingFileName = values.Item2;
 *                         return $"gs://{bucketName}/{mappingFileName}";
 *                     }),
 *                     ImportUriPrefix = bucket.Name.Apply(name => $"gs://{name}"),
 *                 },
 *                 Description = "example description for mapping configuration",
 *             },
 *             FhirStreamingSource = new Gcp.Healthcare.Inputs.PipelineJobMappingPipelineJobFhirStreamingSourceArgs
 *             {
 *                 FhirStore = Output.Tuple(dataset.Id, sourceFhirstore.Name).Apply(values =>
 *                 {
 *                     var id = values.Item1;
 *                     var name = values.Item2;
 *                     return $"{id}/fhirStores/{name}";
 *                 }),
 *                 Description = "example description for streaming fhirstore",
 *             },
 *             FhirStoreDestination = Output.Tuple(dataset.Id, destFhirstore.Name).Apply(values =>
 *             {
 *                 var id = values.Item1;
 *                 var name = values.Item2;
 *                 return $"{id}/fhirStores/{name}";
 *             }),
 *         },
 *     });
 *     var hsa = new Gcp.Storage.BucketIAMMember("hsa", new()
 *     {
 *         Bucket = bucket.Name,
 *         Role = "roles/storage.objectUser",
 *         Member = $"serviceAccount:service-{project.Apply(getProjectResult => getProjectResult.Number)}@gcp-sa-healthcare.iam.gserviceaccount.com",
 *     });
 * });
 * ```
 * ```go
 * package main
 * import (
 * 	"fmt"
 * 	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/healthcare"
 * 	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/organizations"
 * 	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/storage"
 * 	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
 * )
 * func main() {
 * 	pulumi.Run(func(ctx *pulumi.Context) error {
 * 		project, err := organizations.LookupProject(ctx, &organizations.LookupProjectArgs{}, nil)
 * 		if err != nil {
 * 			return err
 * 		}
 * 		dataset, err := healthcare.NewDataset(ctx, "dataset", &healthcare.DatasetArgs{
 * 			Name:     pulumi.String("example_dataset"),
 * 			Location: pulumi.String("us-central1"),
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		sourceFhirstore, err := healthcare.NewFhirStore(ctx, "source_fhirstore", &healthcare.FhirStoreArgs{
 * 			Name:                        pulumi.String("source_fhir_store"),
 * 			Dataset:                     dataset.ID(),
 * 			Version:                     pulumi.String("R4"),
 * 			EnableUpdateCreate:          pulumi.Bool(true),
 * 			DisableReferentialIntegrity: pulumi.Bool(true),
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		destFhirstore, err := healthcare.NewFhirStore(ctx, "dest_fhirstore", &healthcare.FhirStoreArgs{
 * 			Name:                        pulumi.String("dest_fhir_store"),
 * 			Dataset:                     dataset.ID(),
 * 			Version:                     pulumi.String("R4"),
 * 			EnableUpdateCreate:          pulumi.Bool(true),
 * 			DisableReferentialIntegrity: pulumi.Bool(true),
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		bucket, err := storage.NewBucket(ctx, "bucket", &storage.BucketArgs{
 * 			Name:                     pulumi.String("example_bucket_name"),
 * 			Location:                 pulumi.String("us-central1"),
 * 			UniformBucketLevelAccess: pulumi.Bool(true),
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		mappingFile, err := storage.NewBucketObject(ctx, "mapping_file", &storage.BucketObjectArgs{
 * 			Name:    pulumi.String("mapping.wstl"),
 * 			Content: pulumi.String(" "),
 * 			Bucket:  bucket.Name,
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		_, err = healthcare.NewPipelineJob(ctx, "example-mapping-pipeline", &healthcare.PipelineJobArgs{
 * 			Name:           pulumi.String("example_mapping_pipeline_job"),
 * 			Location:       pulumi.String("us-central1"),
 * 			Dataset:        dataset.ID(),
 * 			DisableLineage: pulumi.Bool(true),
 * 			Labels: pulumi.StringMap{
 * 				"example_label_key": pulumi.String("example_label_value"),
 * 			},
 * 			MappingPipelineJob: &healthcare.PipelineJobMappingPipelineJobArgs{
 * 				MappingConfig: &healthcare.PipelineJobMappingPipelineJobMappingConfigArgs{
 * 					WhistleConfigSource: &healthcare.PipelineJobMappingPipelineJobMappingConfigWhistleConfigSourceArgs{
 * 						Uri: pulumi.All(bucket.Name, mappingFile.Name).ApplyT(func(_args []interface{}) (string, error) {
 * 							bucketName := _args[0].(string)
 * 							mappingFileName := _args[1].(string)
 * 							return fmt.Sprintf("gs://%v/%v", bucketName, mappingFileName), nil
 * 						}).(pulumi.StringOutput),
 * 						ImportUriPrefix: bucket.Name.ApplyT(func(name string) (string, error) {
 * 							return fmt.Sprintf("gs://%v", name), nil
 * 						}).(pulumi.StringOutput),
 * 					},
 * 					Description: pulumi.String("example description for mapping configuration"),
 * 				},
 * 				FhirStreamingSource: &healthcare.PipelineJobMappingPipelineJobFhirStreamingSourceArgs{
 * 					FhirStore: pulumi.All(dataset.ID(), sourceFhirstore.Name).ApplyT(func(_args []interface{}) (string, error) {
 * 						id := _args[0].(string)
 * 						name := _args[1].(string)
 * 						return fmt.Sprintf("%v/fhirStores/%v", id, name), nil
 * 					}).(pulumi.StringOutput),
 * 					Description: pulumi.String("example description for streaming fhirstore"),
 * 				},
 * 				FhirStoreDestination: pulumi.All(dataset.ID(), destFhirstore.Name).ApplyT(func(_args []interface{}) (string, error) {
 * 					id := _args[0].(string)
 * 					name := _args[1].(string)
 * 					return fmt.Sprintf("%v/fhirStores/%v", id, name), nil
 * 				}).(pulumi.StringOutput),
 * 			},
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		_, err = storage.NewBucketIAMMember(ctx, "hsa", &storage.BucketIAMMemberArgs{
 * 			Bucket: bucket.Name,
 * 			Role:   pulumi.String("roles/storage.objectUser"),
 * 			Member: pulumi.Sprintf("serviceAccount:service-%v@gcp-sa-healthcare.iam.gserviceaccount.com", project.Number),
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		return nil
 * 	})
 * }
 * ```
 * ```java
 * package generated_program;
 * import com.pulumi.Context;
 * import com.pulumi.Pulumi;
 * import com.pulumi.core.Output;
 * import com.pulumi.gcp.organizations.OrganizationsFunctions;
 * import com.pulumi.gcp.organizations.inputs.GetProjectArgs;
 * import com.pulumi.gcp.healthcare.Dataset;
 * import com.pulumi.gcp.healthcare.DatasetArgs;
 * import com.pulumi.gcp.healthcare.FhirStore;
 * import com.pulumi.gcp.healthcare.FhirStoreArgs;
 * import com.pulumi.gcp.storage.Bucket;
 * import com.pulumi.gcp.storage.BucketArgs;
 * import com.pulumi.gcp.storage.BucketObject;
 * import com.pulumi.gcp.storage.BucketObjectArgs;
 * import com.pulumi.gcp.healthcare.PipelineJob;
 * import com.pulumi.gcp.healthcare.PipelineJobArgs;
 * import com.pulumi.gcp.healthcare.inputs.PipelineJobMappingPipelineJobArgs;
 * import com.pulumi.gcp.healthcare.inputs.PipelineJobMappingPipelineJobMappingConfigArgs;
 * import com.pulumi.gcp.healthcare.inputs.PipelineJobMappingPipelineJobMappingConfigWhistleConfigSourceArgs;
 * import com.pulumi.gcp.healthcare.inputs.PipelineJobMappingPipelineJobFhirStreamingSourceArgs;
 * import com.pulumi.gcp.storage.BucketIAMMember;
 * import com.pulumi.gcp.storage.BucketIAMMemberArgs;
 * import java.util.List;
 * import java.util.ArrayList;
 * import java.util.Map;
 * import java.io.File;
 * import java.nio.file.Files;
 * import java.nio.file.Paths;
 * public class App {
 *     public static void main(String[] args) {
 *         Pulumi.run(App::stack);
 *     }
 *     public static void stack(Context ctx) {
 *         final var project = OrganizationsFunctions.getProject();
 *         var dataset = new Dataset("dataset", DatasetArgs.builder()
 *             .name("example_dataset")
 *             .location("us-central1")
 *             .build());
 *         var sourceFhirstore = new FhirStore("sourceFhirstore", FhirStoreArgs.builder()
 *             .name("source_fhir_store")
 *             .dataset(dataset.id())
 *             .version("R4")
 *             .enableUpdateCreate(true)
 *             .disableReferentialIntegrity(true)
 *             .build());
 *         var destFhirstore = new FhirStore("destFhirstore", FhirStoreArgs.builder()
 *             .name("dest_fhir_store")
 *             .dataset(dataset.id())
 *             .version("R4")
 *             .enableUpdateCreate(true)
 *             .disableReferentialIntegrity(true)
 *             .build());
 *         var bucket = new Bucket("bucket", BucketArgs.builder()
 *             .name("example_bucket_name")
 *             .location("us-central1")
 *             .uniformBucketLevelAccess(true)
 *             .build());
 *         var mappingFile = new BucketObject("mappingFile", BucketObjectArgs.builder()
 *             .name("mapping.wstl")
 *             .content(" ")
 *             .bucket(bucket.name())
 *             .build());
 *         var example_mapping_pipeline = new PipelineJob("example-mapping-pipeline", PipelineJobArgs.builder()
 *             .name("example_mapping_pipeline_job")
 *             .location("us-central1")
 *             .dataset(dataset.id())
 *             .disableLineage(true)
 *             .labels(Map.of("example_label_key", "example_label_value"))
 *             .mappingPipelineJob(PipelineJobMappingPipelineJobArgs.builder()
 *                 .mappingConfig(PipelineJobMappingPipelineJobMappingConfigArgs.builder()
 *                     .whistleConfigSource(PipelineJobMappingPipelineJobMappingConfigWhistleConfigSourceArgs.builder()
 *                         .uri(Output.tuple(bucket.name(), mappingFile.name()).applyValue(values -> {
 *                             var bucketName = values.t1;
 *                             var mappingFileName = values.t2;
 *                             return String.format("gs://%s/%s", bucketName,mappingFileName);
 *                         }))
 *                         .importUriPrefix(bucket.name().applyValue(name -> String.format("gs://%s", name)))
 *                         .build())
 *                     .description("example description for mapping configuration")
 *                     .build())
 *                 .fhirStreamingSource(PipelineJobMappingPipelineJobFhirStreamingSourceArgs.builder()
 *                     .fhirStore(Output.tuple(dataset.id(), sourceFhirstore.name()).applyValue(values -> {
 *                         var id = values.t1;
 *                         var name = values.t2;
 *                         return String.format("%s/fhirStores/%s", id,name);
 *                     }))
 *                     .description("example description for streaming fhirstore")
 *                     .build())
 *                 .fhirStoreDestination(Output.tuple(dataset.id(), destFhirstore.name()).applyValue(values -> {
 *                     var id = values.t1;
 *                     var name = values.t2;
 *                     return String.format("%s/fhirStores/%s", id,name);
 *                 }))
 *                 .build())
 *             .build());
 *         var hsa = new BucketIAMMember("hsa", BucketIAMMemberArgs.builder()
 *             .bucket(bucket.name())
 *             .role("roles/storage.objectUser")
 *             .member(String.format("serviceAccount:service-%s@gcp-sa-healthcare.iam.gserviceaccount.com", project.applyValue(getProjectResult -> getProjectResult.number())))
 *             .build());
 *     }
 * }
 * ```
 * ```yaml
 * resources:
 *   example-mapping-pipeline:
 *     type: gcp:healthcare:PipelineJob
 *     properties:
 *       name: example_mapping_pipeline_job
 *       location: us-central1
 *       dataset: ${dataset.id}
 *       disableLineage: true
 *       labels:
 *         example_label_key: example_label_value
 *       mappingPipelineJob:
 *         mappingConfig:
 *           whistleConfigSource:
 *             uri: gs://${bucket.name}/${mappingFile.name}
 *             importUriPrefix: gs://${bucket.name}
 *           description: example description for mapping configuration
 *         fhirStreamingSource:
 *           fhirStore: ${dataset.id}/fhirStores/${sourceFhirstore.name}
 *           description: example description for streaming fhirstore
 *         fhirStoreDestination: ${dataset.id}/fhirStores/${destFhirstore.name}
 *   dataset:
 *     type: gcp:healthcare:Dataset
 *     properties:
 *       name: example_dataset
 *       location: us-central1
 *   sourceFhirstore:
 *     type: gcp:healthcare:FhirStore
 *     name: source_fhirstore
 *     properties:
 *       name: source_fhir_store
 *       dataset: ${dataset.id}
 *       version: R4
 *       enableUpdateCreate: true
 *       disableReferentialIntegrity: true
 *   destFhirstore:
 *     type: gcp:healthcare:FhirStore
 *     name: dest_fhirstore
 *     properties:
 *       name: dest_fhir_store
 *       dataset: ${dataset.id}
 *       version: R4
 *       enableUpdateCreate: true
 *       disableReferentialIntegrity: true
 *   bucket:
 *     type: gcp:storage:Bucket
 *     properties:
 *       name: example_bucket_name
 *       location: us-central1
 *       uniformBucketLevelAccess: true
 *   mappingFile:
 *     type: gcp:storage:BucketObject
 *     name: mapping_file
 *     properties:
 *       name: mapping.wstl
 *       content: ' '
 *       bucket: ${bucket.name}
 *   hsa:
 *     type: gcp:storage:BucketIAMMember
 *     properties:
 *       bucket: ${bucket.name}
 *       role: roles/storage.objectUser
 *       member: serviceAccount:service-${project.number}@gcp-sa-healthcare.iam.gserviceaccount.com
 * variables:
 *   project:
 *     fn::invoke:
 *       function: gcp:organizations:getProject
 *       arguments: {}
 * ```
 * 
 * ### Healthcare Pipeline Job Mapping Recon Dest
 * 
 * ```typescript
 * import * as pulumi from "@pulumi/pulumi";
 * import * as gcp from "@pulumi/gcp";
 * const project = gcp.organizations.getProject({});
 * const dataset = new gcp.healthcare.Dataset("dataset", {
 *     name: "example_dataset",
 *     location: "us-central1",
 * });
 * const destFhirstore = new gcp.healthcare.FhirStore("dest_fhirstore", {
 *     name: "dest_fhir_store",
 *     dataset: dataset.id,
 *     version: "R4",
 *     enableUpdateCreate: true,
 *     disableReferentialIntegrity: true,
 * });
 * const bucket = new gcp.storage.Bucket("bucket", {
 *     name: "example_bucket_name",
 *     location: "us-central1",
 *     uniformBucketLevelAccess: true,
 * });
 * const mergeFile = new gcp.storage.BucketObject("merge_file", {
 *     name: "merge.wstl",
 *     content: " ",
 *     bucket: bucket.name,
 * });
 * const recon = new gcp.healthcare.PipelineJob("recon", {
 *     name: "example_recon_pipeline_job",
 *     location: "us-central1",
 *     dataset: dataset.id,
 *     disableLineage: true,
 *     reconciliationPipelineJob: {
 *         mergeConfig: {
 *             description: "sample description for reconciliation rules",
 *             whistleConfigSource: {
 *                 uri: pulumi.interpolate`gs://${bucket.name}/${mergeFile.name}`,
 *                 importUriPrefix: pulumi.interpolate`gs://${bucket.name}`,
 *             },
 *         },
 *         matchingUriPrefix: pulumi.interpolate`gs://${bucket.name}`,
 *         fhirStoreDestination: pulumi.interpolate`${dataset.id}/fhirStores/${destFhirstore.name}`,
 *     },
 * });
 * const sourceFhirstore = new gcp.healthcare.FhirStore("source_fhirstore", {
 *     name: "source_fhir_store",
 *     dataset: dataset.id,
 *     version: "R4",
 *     enableUpdateCreate: true,
 *     disableReferentialIntegrity: true,
 * });
 * const mappingFile = new gcp.storage.BucketObject("mapping_file", {
 *     name: "mapping.wstl",
 *     content: " ",
 *     bucket: bucket.name,
 * });
 * const example_mapping_pipeline = new gcp.healthcare.PipelineJob("example-mapping-pipeline", {
 *     name: "example_mapping_pipeline_job",
 *     location: "us-central1",
 *     dataset: dataset.id,
 *     disableLineage: true,
 *     labels: {
 *         example_label_key: "example_label_value",
 *     },
 *     mappingPipelineJob: {
 *         mappingConfig: {
 *             whistleConfigSource: {
 *                 uri: pulumi.interpolate`gs://${bucket.name}/${mappingFile.name}`,
 *                 importUriPrefix: pulumi.interpolate`gs://${bucket.name}`,
 *             },
 *             description: "example description for mapping configuration",
 *         },
 *         fhirStreamingSource: {
 *             fhirStore: pulumi.interpolate`${dataset.id}/fhirStores/${sourceFhirstore.name}`,
 *             description: "example description for streaming fhirstore",
 *         },
 *         reconciliationDestination: true,
 *     },
 * }, {
 *     dependsOn: [recon],
 * });
 * const hsa = new gcp.storage.BucketIAMMember("hsa", {
 *     bucket: bucket.name,
 *     role: "roles/storage.objectUser",
 *     member: project.then(project => `serviceAccount:service-${project.number}@gcp-sa-healthcare.iam.gserviceaccount.com`),
 * });
 * ```
 * ```python
 * import pulumi
 * import pulumi_gcp as gcp
 * project = gcp.organizations.get_project()
 * dataset = gcp.healthcare.Dataset("dataset",
 *     name="example_dataset",
 *     location="us-central1")
 * dest_fhirstore = gcp.healthcare.FhirStore("dest_fhirstore",
 *     name="dest_fhir_store",
 *     dataset=dataset.id,
 *     version="R4",
 *     enable_update_create=True,
 *     disable_referential_integrity=True)
 * bucket = gcp.storage.Bucket("bucket",
 *     name="example_bucket_name",
 *     location="us-central1",
 *     uniform_bucket_level_access=True)
 * merge_file = gcp.storage.BucketObject("merge_file",
 *     name="merge.wstl",
 *     content=" ",
 *     bucket=bucket.name)
 * recon = gcp.healthcare.PipelineJob("recon",
 *     name="example_recon_pipeline_job",
 *     location="us-central1",
 *     dataset=dataset.id,
 *     disable_lineage=True,
 *     reconciliation_pipeline_job={
 *         "merge_config": {
 *             "description": "sample description for reconciliation rules",
 *             "whistle_config_source": {
 *                 "uri": pulumi.Output.all(
 *                     bucketName=bucket.name,
 *                     mergeFileName=merge_file.name
 * ).apply(lambda resolved_outputs: f"gs://{resolved_outputs['bucketName']}/{resolved_outputs['mergeFileName']}")
 * ,
 *                 "import_uri_prefix": bucket.name.apply(lambda name: f"gs://{name}"),
 *             },
 *         },
 *         "matching_uri_prefix": bucket.name.apply(lambda name: f"gs://{name}"),
 *         "fhir_store_destination": pulumi.Output.all(
 *             id=dataset.id,
 *             name=dest_fhirstore.name
 * ).apply(lambda resolved_outputs: f"{resolved_outputs['id']}/fhirStores/{resolved_outputs['name']}")
 * ,
 *     })
 * source_fhirstore = gcp.healthcare.FhirStore("source_fhirstore",
 *     name="source_fhir_store",
 *     dataset=dataset.id,
 *     version="R4",
 *     enable_update_create=True,
 *     disable_referential_integrity=True)
 * mapping_file = gcp.storage.BucketObject("mapping_file",
 *     name="mapping.wstl",
 *     content=" ",
 *     bucket=bucket.name)
 * example_mapping_pipeline = gcp.healthcare.PipelineJob("example-mapping-pipeline",
 *     name="example_mapping_pipeline_job",
 *     location="us-central1",
 *     dataset=dataset.id,
 *     disable_lineage=True,
 *     labels={
 *         "example_label_key": "example_label_value",
 *     },
 *     mapping_pipeline_job={
 *         "mapping_config": {
 *             "whistle_config_source": {
 *                 "uri": pulumi.Output.all(
 *                     bucketName=bucket.name,
 *                     mappingFileName=mapping_file.name
 * ).apply(lambda resolved_outputs: f"gs://{resolved_outputs['bucketName']}/{resolved_outputs['mappingFileName']}")
 * ,
 *                 "import_uri_prefix": bucket.name.apply(lambda name: f"gs://{name}"),
 *             },
 *             "description": "example description for mapping configuration",
 *         },
 *         "fhir_streaming_source": {
 *             "fhir_store": pulumi.Output.all(
 *                 id=dataset.id,
 *                 name=source_fhirstore.name
 * ).apply(lambda resolved_outputs: f"{resolved_outputs['id']}/fhirStores/{resolved_outputs['name']}")
 * ,
 *             "description": "example description for streaming fhirstore",
 *         },
 *         "reconciliation_destination": True,
 *     },
 *     opts = pulumi.ResourceOptions(depends_on=[recon]))
 * hsa = gcp.storage.BucketIAMMember("hsa",
 *     bucket=bucket.name,
 *     role="roles/storage.objectUser",
 *     member=f"serviceAccount:service-{project.number}@gcp-sa-healthcare.iam.gserviceaccount.com")
 * ```
 * ```csharp
 * using System.Collections.Generic;
 * using System.Linq;
 * using Pulumi;
 * using Gcp = Pulumi.Gcp;
 * return await Deployment.RunAsync(() =>
 * {
 *     var project = Gcp.Organizations.GetProject.Invoke();
 *     var dataset = new Gcp.Healthcare.Dataset("dataset", new()
 *     {
 *         Name = "example_dataset",
 *         Location = "us-central1",
 *     });
 *     var destFhirstore = new Gcp.Healthcare.FhirStore("dest_fhirstore", new()
 *     {
 *         Name = "dest_fhir_store",
 *         Dataset = dataset.Id,
 *         Version = "R4",
 *         EnableUpdateCreate = true,
 *         DisableReferentialIntegrity = true,
 *     });
 *     var bucket = new Gcp.Storage.Bucket("bucket", new()
 *     {
 *         Name = "example_bucket_name",
 *         Location = "us-central1",
 *         UniformBucketLevelAccess = true,
 *     });
 *     var mergeFile = new Gcp.Storage.BucketObject("merge_file", new()
 *     {
 *         Name = "merge.wstl",
 *         Content = " ",
 *         Bucket = bucket.Name,
 *     });
 *     var recon = new Gcp.Healthcare.PipelineJob("recon", new()
 *     {
 *         Name = "example_recon_pipeline_job",
 *         Location = "us-central1",
 *         Dataset = dataset.Id,
 *         DisableLineage = true,
 *         ReconciliationPipelineJob = new Gcp.Healthcare.Inputs.PipelineJobReconciliationPipelineJobArgs
 *         {
 *             MergeConfig = new Gcp.Healthcare.Inputs.PipelineJobReconciliationPipelineJobMergeConfigArgs
 *             {
 *                 Description = "sample description for reconciliation rules",
 *                 WhistleConfigSource = new Gcp.Healthcare.Inputs.PipelineJobReconciliationPipelineJobMergeConfigWhistleConfigSourceArgs
 *                 {
 *                     Uri = Output.Tuple(bucket.Name, mergeFile.Name).Apply(values =>
 *                     {
 *                         var bucketName = values.Item1;
 *                         var mergeFileName = values.Item2;
 *                         return $"gs://{bucketName}/{mergeFileName}";
 *                     }),
 *                     ImportUriPrefix = bucket.Name.Apply(name => $"gs://{name}"),
 *                 },
 *             },
 *             MatchingUriPrefix = bucket.Name.Apply(name => $"gs://{name}"),
 *             FhirStoreDestination = Output.Tuple(dataset.Id, destFhirstore.Name).Apply(values =>
 *             {
 *                 var id = values.Item1;
 *                 var name = values.Item2;
 *                 return $"{id}/fhirStores/{name}";
 *             }),
 *         },
 *     });
 *     var sourceFhirstore = new Gcp.Healthcare.FhirStore("source_fhirstore", new()
 *     {
 *         Name = "source_fhir_store",
 *         Dataset = dataset.Id,
 *         Version = "R4",
 *         EnableUpdateCreate = true,
 *         DisableReferentialIntegrity = true,
 *     });
 *     var mappingFile = new Gcp.Storage.BucketObject("mapping_file", new()
 *     {
 *         Name = "mapping.wstl",
 *         Content = " ",
 *         Bucket = bucket.Name,
 *     });
 *     var example_mapping_pipeline = new Gcp.Healthcare.PipelineJob("example-mapping-pipeline", new()
 *     {
 *         Name = "example_mapping_pipeline_job",
 *         Location = "us-central1",
 *         Dataset = dataset.Id,
 *         DisableLineage = true,
 *         Labels =
 *         {
 *             { "example_label_key", "example_label_value" },
 *         },
 *         MappingPipelineJob = new Gcp.Healthcare.Inputs.PipelineJobMappingPipelineJobArgs
 *         {
 *             MappingConfig = new Gcp.Healthcare.Inputs.PipelineJobMappingPipelineJobMappingConfigArgs
 *             {
 *                 WhistleConfigSource = new Gcp.Healthcare.Inputs.PipelineJobMappingPipelineJobMappingConfigWhistleConfigSourceArgs
 *                 {
 *                     Uri = Output.Tuple(bucket.Name, mappingFile.Name).Apply(values =>
 *                     {
 *                         var bucketName = values.Item1;
 *                         var mappingFileName = values.Item2;
 *                         return $"gs://{bucketName}/{mappingFileName}";
 *                     }),
 *                     ImportUriPrefix = bucket.Name.Apply(name => $"gs://{name}"),
 *                 },
 *                 Description = "example description for mapping configuration",
 *             },
 *             FhirStreamingSource = new Gcp.Healthcare.Inputs.PipelineJobMappingPipelineJobFhirStreamingSourceArgs
 *             {
 *                 FhirStore = Output.Tuple(dataset.Id, sourceFhirstore.Name).Apply(values =>
 *                 {
 *                     var id = values.Item1;
 *                     var name = values.Item2;
 *                     return $"{id}/fhirStores/{name}";
 *                 }),
 *                 Description = "example description for streaming fhirstore",
 *             },
 *             ReconciliationDestination = true,
 *         },
 *     }, new CustomResourceOptions
 *     {
 *         DependsOn =
 *         {
 *             recon,
 *         },
 *     });
 *     var hsa = new Gcp.Storage.BucketIAMMember("hsa", new()
 *     {
 *         Bucket = bucket.Name,
 *         Role = "roles/storage.objectUser",
 *         Member = $"serviceAccount:service-{project.Apply(getProjectResult => getProjectResult.Number)}@gcp-sa-healthcare.iam.gserviceaccount.com",
 *     });
 * });
 * ```
 * ```go
 * package main
 * import (
 * 	"fmt"
 * 	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/healthcare"
 * 	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/organizations"
 * 	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/storage"
 * 	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
 * )
 * func main() {
 * 	pulumi.Run(func(ctx *pulumi.Context) error {
 * 		project, err := organizations.LookupProject(ctx, &organizations.LookupProjectArgs{}, nil)
 * 		if err != nil {
 * 			return err
 * 		}
 * 		dataset, err := healthcare.NewDataset(ctx, "dataset", &healthcare.DatasetArgs{
 * 			Name:     pulumi.String("example_dataset"),
 * 			Location: pulumi.String("us-central1"),
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		destFhirstore, err := healthcare.NewFhirStore(ctx, "dest_fhirstore", &healthcare.FhirStoreArgs{
 * 			Name:                        pulumi.String("dest_fhir_store"),
 * 			Dataset:                     dataset.ID(),
 * 			Version:                     pulumi.String("R4"),
 * 			EnableUpdateCreate:          pulumi.Bool(true),
 * 			DisableReferentialIntegrity: pulumi.Bool(true),
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		bucket, err := storage.NewBucket(ctx, "bucket", &storage.BucketArgs{
 * 			Name:                     pulumi.String("example_bucket_name"),
 * 			Location:                 pulumi.String("us-central1"),
 * 			UniformBucketLevelAccess: pulumi.Bool(true),
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		mergeFile, err := storage.NewBucketObject(ctx, "merge_file", &storage.BucketObjectArgs{
 * 			Name:    pulumi.String("merge.wstl"),
 * 			Content: pulumi.String(" "),
 * 			Bucket:  bucket.Name,
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		recon, err := healthcare.NewPipelineJob(ctx, "recon", &healthcare.PipelineJobArgs{
 * 			Name:           pulumi.String("example_recon_pipeline_job"),
 * 			Location:       pulumi.String("us-central1"),
 * 			Dataset:        dataset.ID(),
 * 			DisableLineage: pulumi.Bool(true),
 * 			ReconciliationPipelineJob: &healthcare.PipelineJobReconciliationPipelineJobArgs{
 * 				MergeConfig: &healthcare.PipelineJobReconciliationPipelineJobMergeConfigArgs{
 * 					Description: pulumi.String("sample description for reconciliation rules"),
 * 					WhistleConfigSource: &healthcare.PipelineJobReconciliationPipelineJobMergeConfigWhistleConfigSourceArgs{
 * 						Uri: pulumi.All(bucket.Name, mergeFile.Name).ApplyT(func(_args []interface{}) (string, error) {
 * 							bucketName := _args[0].(string)
 * 							mergeFileName := _args[1].(string)
 * 							return fmt.Sprintf("gs://%v/%v", bucketName, mergeFileName), nil
 * 						}).(pulumi.StringOutput),
 * 						ImportUriPrefix: bucket.Name.ApplyT(func(name string) (string, error) {
 * 							return fmt.Sprintf("gs://%v", name), nil
 * 						}).(pulumi.StringOutput),
 * 					},
 * 				},
 * 				MatchingUriPrefix: bucket.Name.ApplyT(func(name string) (string, error) {
 * 					return fmt.Sprintf("gs://%v", name), nil
 * 				}).(pulumi.StringOutput),
 * 				FhirStoreDestination: pulumi.All(dataset.ID(), destFhirstore.Name).ApplyT(func(_args []interface{}) (string, error) {
 * 					id := _args[0].(string)
 * 					name := _args[1].(string)
 * 					return fmt.Sprintf("%v/fhirStores/%v", id, name), nil
 * 				}).(pulumi.StringOutput),
 * 			},
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		sourceFhirstore, err := healthcare.NewFhirStore(ctx, "source_fhirstore", &healthcare.FhirStoreArgs{
 * 			Name:                        pulumi.String("source_fhir_store"),
 * 			Dataset:                     dataset.ID(),
 * 			Version:                     pulumi.String("R4"),
 * 			EnableUpdateCreate:          pulumi.Bool(true),
 * 			DisableReferentialIntegrity: pulumi.Bool(true),
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		mappingFile, err := storage.NewBucketObject(ctx, "mapping_file", &storage.BucketObjectArgs{
 * 			Name:    pulumi.String("mapping.wstl"),
 * 			Content: pulumi.String(" "),
 * 			Bucket:  bucket.Name,
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		_, err = healthcare.NewPipelineJob(ctx, "example-mapping-pipeline", &healthcare.PipelineJobArgs{
 * 			Name:           pulumi.String("example_mapping_pipeline_job"),
 * 			Location:       pulumi.String("us-central1"),
 * 			Dataset:        dataset.ID(),
 * 			DisableLineage: pulumi.Bool(true),
 * 			Labels: pulumi.StringMap{
 * 				"example_label_key": pulumi.String("example_label_value"),
 * 			},
 * 			MappingPipelineJob: &healthcare.PipelineJobMappingPipelineJobArgs{
 * 				MappingConfig: &healthcare.PipelineJobMappingPipelineJobMappingConfigArgs{
 * 					WhistleConfigSource: &healthcare.PipelineJobMappingPipelineJobMappingConfigWhistleConfigSourceArgs{
 * 						Uri: pulumi.All(bucket.Name, mappingFile.Name).ApplyT(func(_args []interface{}) (string, error) {
 * 							bucketName := _args[0].(string)
 * 							mappingFileName := _args[1].(string)
 * 							return fmt.Sprintf("gs://%v/%v", bucketName, mappingFileName), nil
 * 						}).(pulumi.StringOutput),
 * 						ImportUriPrefix: bucket.Name.ApplyT(func(name string) (string, error) {
 * 							return fmt.Sprintf("gs://%v", name), nil
 * 						}).(pulumi.StringOutput),
 * 					},
 * 					Description: pulumi.String("example description for mapping configuration"),
 * 				},
 * 				FhirStreamingSource: &healthcare.PipelineJobMappingPipelineJobFhirStreamingSourceArgs{
 * 					FhirStore: pulumi.All(dataset.ID(), sourceFhirstore.Name).ApplyT(func(_args []interface{}) (string, error) {
 * 						id := _args[0].(string)
 * 						name := _args[1].(string)
 * 						return fmt.Sprintf("%v/fhirStores/%v", id, name), nil
 * 					}).(pulumi.StringOutput),
 * 					Description: pulumi.String("example description for streaming fhirstore"),
 * 				},
 * 				ReconciliationDestination: pulumi.Bool(true),
 * 			},
 * 		}, pulumi.DependsOn([]pulumi.Resource{
 * 			recon,
 * 		}))
 * 		if err != nil {
 * 			return err
 * 		}
 * 		_, err = storage.NewBucketIAMMember(ctx, "hsa", &storage.BucketIAMMemberArgs{
 * 			Bucket: bucket.Name,
 * 			Role:   pulumi.String("roles/storage.objectUser"),
 * 			Member: pulumi.Sprintf("serviceAccount:service-%v@gcp-sa-healthcare.iam.gserviceaccount.com", project.Number),
 * 		})
 * 		if err != nil {
 * 			return err
 * 		}
 * 		return nil
 * 	})
 * }
 * ```
 * ```java
 * package generated_program;
 * import com.pulumi.Context;
 * import com.pulumi.Pulumi;
 * import com.pulumi.core.Output;
 * import com.pulumi.gcp.organizations.OrganizationsFunctions;
 * import com.pulumi.gcp.organizations.inputs.GetProjectArgs;
 * import com.pulumi.gcp.healthcare.Dataset;
 * import com.pulumi.gcp.healthcare.DatasetArgs;
 * import com.pulumi.gcp.healthcare.FhirStore;
 * import com.pulumi.gcp.healthcare.FhirStoreArgs;
 * import com.pulumi.gcp.storage.Bucket;
 * import com.pulumi.gcp.storage.BucketArgs;
 * import com.pulumi.gcp.storage.BucketObject;
 * import com.pulumi.gcp.storage.BucketObjectArgs;
 * import com.pulumi.gcp.healthcare.PipelineJob;
 * import com.pulumi.gcp.healthcare.PipelineJobArgs;
 * import com.pulumi.gcp.healthcare.inputs.PipelineJobReconciliationPipelineJobArgs;
 * import com.pulumi.gcp.healthcare.inputs.PipelineJobReconciliationPipelineJobMergeConfigArgs;
 * import com.pulumi.gcp.healthcare.inputs.PipelineJobReconciliationPipelineJobMergeConfigWhistleConfigSourceArgs;
 * import com.pulumi.gcp.healthcare.inputs.PipelineJobMappingPipelineJobArgs;
 * import com.pulumi.gcp.healthcare.inputs.PipelineJobMappingPipelineJobMappingConfigArgs;
 * import com.pulumi.gcp.healthcare.inputs.PipelineJobMappingPipelineJobMappingConfigWhistleConfigSourceArgs;
 * import com.pulumi.gcp.healthcare.inputs.PipelineJobMappingPipelineJobFhirStreamingSourceArgs;
 * import com.pulumi.gcp.storage.BucketIAMMember;
 * import com.pulumi.gcp.storage.BucketIAMMemberArgs;
 * import com.pulumi.resources.CustomResourceOptions;
 * import java.util.List;
 * import java.util.ArrayList;
 * import java.util.Map;
 * import java.io.File;
 * import java.nio.file.Files;
 * import java.nio.file.Paths;
 * public class App {
 *     public static void main(String[] args) {
 *         Pulumi.run(App::stack);
 *     }
 *     public static void stack(Context ctx) {
 *         final var project = OrganizationsFunctions.getProject();
 *         var dataset = new Dataset("dataset", DatasetArgs.builder()
 *             .name("example_dataset")
 *             .location("us-central1")
 *             .build());
 *         var destFhirstore = new FhirStore("destFhirstore", FhirStoreArgs.builder()
 *             .name("dest_fhir_store")
 *             .dataset(dataset.id())
 *             .version("R4")
 *             .enableUpdateCreate(true)
 *             .disableReferentialIntegrity(true)
 *             .build());
 *         var bucket = new Bucket("bucket", BucketArgs.builder()
 *             .name("example_bucket_name")
 *             .location("us-central1")
 *             .uniformBucketLevelAccess(true)
 *             .build());
 *         var mergeFile = new BucketObject("mergeFile", BucketObjectArgs.builder()
 *             .name("merge.wstl")
 *             .content(" ")
 *             .bucket(bucket.name())
 *             .build());
 *         var recon = new PipelineJob("recon", PipelineJobArgs.builder()
 *             .name("example_recon_pipeline_job")
 *             .location("us-central1")
 *             .dataset(dataset.id())
 *             .disableLineage(true)
 *             .reconciliationPipelineJob(PipelineJobReconciliationPipelineJobArgs.builder()
 *                 .mergeConfig(PipelineJobReconciliationPipelineJobMergeConfigArgs.builder()
 *                     .description("sample description for reconciliation rules")
 *                     .whistleConfigSource(PipelineJobReconciliationPipelineJobMergeConfigWhistleConfigSourceArgs.builder()
 *                         .uri(Output.tuple(bucket.name(), mergeFile.name()).applyValue(values -> {
 *                             var bucketName = values.t1;
 *                             var mergeFileName = values.t2;
 *                             return String.format("gs://%s/%s", bucketName,mergeFileName);
 *                         }))
 *                         .importUriPrefix(bucket.name().applyValue(name -> String.format("gs://%s", name)))
 *                         .build())
 *                     .build())
 *                 .matchingUriPrefix(bucket.name().applyValue(name -> String.format("gs://%s", name)))
 *                 .fhirStoreDestination(Output.tuple(dataset.id(), destFhirstore.name()).applyValue(values -> {
 *                     var id = values.t1;
 *                     var name = values.t2;
 *                     return String.format("%s/fhirStores/%s", id,name);
 *                 }))
 *                 .build())
 *             .build());
 *         var sourceFhirstore = new FhirStore("sourceFhirstore", FhirStoreArgs.builder()
 *             .name("source_fhir_store")
 *             .dataset(dataset.id())
 *             .version("R4")
 *             .enableUpdateCreate(true)
 *             .disableReferentialIntegrity(true)
 *             .build());
 *         var mappingFile = new BucketObject("mappingFile", BucketObjectArgs.builder()
 *             .name("mapping.wstl")
 *             .content(" ")
 *             .bucket(bucket.name())
 *             .build());
 *         var example_mapping_pipeline = new PipelineJob("example-mapping-pipeline", PipelineJobArgs.builder()
 *             .name("example_mapping_pipeline_job")
 *             .location("us-central1")
 *             .dataset(dataset.id())
 *             .disableLineage(true)
 *             .labels(Map.of("example_label_key", "example_label_value"))
 *             .mappingPipelineJob(PipelineJobMappingPipelineJobArgs.builder()
 *                 .mappingConfig(PipelineJobMappingPipelineJobMappingConfigArgs.builder()
 *                     .whistleConfigSource(PipelineJobMappingPipelineJobMappingConfigWhistleConfigSourceArgs.builder()
 *                         .uri(Output.tuple(bucket.name(), mappingFile.name()).applyValue(values -> {
 *                             var bucketName = values.t1;
 *                             var mappingFileName = values.t2;
 *                             return String.format("gs://%s/%s", bucketName,mappingFileName);
 *                         }))
 *                         .importUriPrefix(bucket.name().applyValue(name -> String.format("gs://%s", name)))
 *                         .build())
 *                     .description("example description for mapping configuration")
 *                     .build())
 *                 .fhirStreamingSource(PipelineJobMappingPipelineJobFhirStreamingSourceArgs.builder()
 *                     .fhirStore(Output.tuple(dataset.id(), sourceFhirstore.name()).applyValue(values -> {
 *                         var id = values.t1;
 *                         var name = values.t2;
 *                         return String.format("%s/fhirStores/%s", id,name);
 *                     }))
 *                     .description("example description for streaming fhirstore")
 *                     .build())
 *                 .reconciliationDestination(true)
 *                 .build())
 *             .build(), CustomResourceOptions.builder()
 *                 .dependsOn(recon)
 *                 .build());
 *         var hsa = new BucketIAMMember("hsa", BucketIAMMemberArgs.builder()
 *             .bucket(bucket.name())
 *             .role("roles/storage.objectUser")
 *             .member(String.format("serviceAccount:service-%s@gcp-sa-healthcare.iam.gserviceaccount.com", project.applyValue(getProjectResult -> getProjectResult.number())))
 *             .build());
 *     }
 * }
 * ```
 * ```yaml
 * resources:
 *   recon:
 *     type: gcp:healthcare:PipelineJob
 *     properties:
 *       name: example_recon_pipeline_job
 *       location: us-central1
 *       dataset: ${dataset.id}
 *       disableLineage: true
 *       reconciliationPipelineJob:
 *         mergeConfig:
 *           description: sample description for reconciliation rules
 *           whistleConfigSource:
 *             uri: gs://${bucket.name}/${mergeFile.name}
 *             importUriPrefix: gs://${bucket.name}
 *         matchingUriPrefix: gs://${bucket.name}
 *         fhirStoreDestination: ${dataset.id}/fhirStores/${destFhirstore.name}
 *   example-mapping-pipeline:
 *     type: gcp:healthcare:PipelineJob
 *     properties:
 *       name: example_mapping_pipeline_job
 *       location: us-central1
 *       dataset: ${dataset.id}
 *       disableLineage: true
 *       labels:
 *         example_label_key: example_label_value
 *       mappingPipelineJob:
 *         mappingConfig:
 *           whistleConfigSource:
 *             uri: gs://${bucket.name}/${mappingFile.name}
 *             importUriPrefix: gs://${bucket.name}
 *           description: example description for mapping configuration
 *         fhirStreamingSource:
 *           fhirStore: ${dataset.id}/fhirStores/${sourceFhirstore.name}
 *           description: example description for streaming fhirstore
 *         reconciliationDestination: true
 *     options:
 *       dependsOn:
 *         - ${recon}
 *   dataset:
 *     type: gcp:healthcare:Dataset
 *     properties:
 *       name: example_dataset
 *       location: us-central1
 *   sourceFhirstore:
 *     type: gcp:healthcare:FhirStore
 *     name: source_fhirstore
 *     properties:
 *       name: source_fhir_store
 *       dataset: ${dataset.id}
 *       version: R4
 *       enableUpdateCreate: true
 *       disableReferentialIntegrity: true
 *   destFhirstore:
 *     type: gcp:healthcare:FhirStore
 *     name: dest_fhirstore
 *     properties:
 *       name: dest_fhir_store
 *       dataset: ${dataset.id}
 *       version: R4
 *       enableUpdateCreate: true
 *       disableReferentialIntegrity: true
 *   bucket:
 *     type: gcp:storage:Bucket
 *     properties:
 *       name: example_bucket_name
 *       location: us-central1
 *       uniformBucketLevelAccess: true
 *   mappingFile:
 *     type: gcp:storage:BucketObject
 *     name: mapping_file
 *     properties:
 *       name: mapping.wstl
 *       content: ' '
 *       bucket: ${bucket.name}
 *   mergeFile:
 *     type: gcp:storage:BucketObject
 *     name: merge_file
 *     properties:
 *       name: merge.wstl
 *       content: ' '
 *       bucket: ${bucket.name}
 *   hsa:
 *     type: gcp:storage:BucketIAMMember
 *     properties:
 *       bucket: ${bucket.name}
 *       role: roles/storage.objectUser
 *       member: serviceAccount:service-${project.number}@gcp-sa-healthcare.iam.gserviceaccount.com
 * variables:
 *   project:
 *     fn::invoke:
 *       function: gcp:organizations:getProject
 *       arguments: {}
 * ```
 * 
 * ## Import
 * PipelineJob can be imported using any of these accepted formats:
 * * `{{dataset}}/pipelineJobs/{{name}}`
 * * `{{dataset}}/pipelineJobs?pipelineJobId={{name}}`
 * * `{{name}}`
 * When using the `pulumi import` command, PipelineJob can be imported using one of the formats above. For example:
 * ```sh
 * $ pulumi import gcp:healthcare/pipelineJob:PipelineJob default {{dataset}}/pipelineJobs/{{name}}
 * ```
 * ```sh
 * $ pulumi import gcp:healthcare/pipelineJob:PipelineJob default {{dataset}}/pipelineJobs?pipelineJobId={{name}}
 * ```
 * ```sh
 * $ pulumi import gcp:healthcare/pipelineJob:PipelineJob default {{name}}
 * ```
 */
public class PipelineJob internal constructor(
    override val javaResource: com.pulumi.gcp.healthcare.PipelineJob,
) : KotlinCustomResource(javaResource, PipelineJobMapper) {
    /**
     * Specifies the backfill configuration.
     * Structure is documented below.
     */
    public val backfillPipelineJob: Output<PipelineJobBackfillPipelineJob>?
        get() = javaResource.backfillPipelineJob().applyValue({ args0 ->
            args0.map({ args0 ->
                args0.let({ args0 -> pipelineJobBackfillPipelineJobToKotlin(args0) })
            }).orElse(null)
        })

    /**
     * Healthcare Dataset under which the Pipeline Job is to run
     * - - -
     */
    public val dataset: Output<String>
        get() = javaResource.dataset().applyValue({ args0 -> args0 })

    /**
     * If true, disables writing lineage for the pipeline.
     */
    public val disableLineage: Output<Boolean>?
        get() = javaResource.disableLineage().applyValue({ args0 ->
            args0.map({ args0 ->
                args0
            }).orElse(null)
        })

    /**
     * All of labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
     */
    public val effectiveLabels: Output<Map<String, String>>
        get() = javaResource.effectiveLabels().applyValue({ args0 ->
            args0.map({ args0 ->
                args0.key.to(args0.value)
            }).toMap()
        })

    /**
     * User-supplied key-value pairs used to organize Pipeline Jobs.
     * Label keys must be between 1 and 63 characters long, have a UTF-8 encoding of
     * maximum 128 bytes, and must conform to the following PCRE regular expression:
     * [\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62}
     * Label values are optional, must be between 1 and 63 characters long, have a
     * UTF-8 encoding of maximum 128 bytes, and must conform to the following PCRE
     * regular expression: [\p{Ll}\p{Lo}\p{N}_-]{0,63}
     * No more than 64 labels can be associated with a given pipeline.
     * An object containing a list of "key": value pairs.
     * Example: { "name": "wrench", "mass": "1.3kg", "count": "3" }.
     * **Note**: This field is non-authoritative, and will only manage the labels present in your configuration.
     * Please refer to the field `effective_labels` for all of the labels present on the resource.
     */
    public val labels: Output<Map<String, String>>?
        get() = javaResource.labels().applyValue({ args0 ->
            args0.map({ args0 ->
                args0.map({ args0 ->
                    args0.key.to(args0.value)
                }).toMap()
            }).orElse(null)
        })

    /**
     * Location where the Pipeline Job is to run
     */
    public val location: Output<String>
        get() = javaResource.location().applyValue({ args0 -> args0 })

    /**
     * Specifies mapping configuration.
     * Structure is documented below.
     */
    public val mappingPipelineJob: Output<PipelineJobMappingPipelineJob>?
        get() = javaResource.mappingPipelineJob().applyValue({ args0 ->
            args0.map({ args0 ->
                args0.let({ args0 -> pipelineJobMappingPipelineJobToKotlin(args0) })
            }).orElse(null)
        })

    /**
     * Specifies the name of the pipeline job. This field is user-assigned.
     */
    public val name: Output<String>
        get() = javaResource.name().applyValue({ args0 -> args0 })

    /**
     * The combination of labels configured directly on the resource
     * and default labels configured on the provider.
     */
    public val pulumiLabels: Output<Map<String, String>>
        get() = javaResource.pulumiLabels().applyValue({ args0 ->
            args0.map({ args0 ->
                args0.key.to(args0.value)
            }).toMap()
        })

    /**
     * Specifies reconciliation configuration.
     * Structure is documented below.
     */
    public val reconciliationPipelineJob: Output<PipelineJobReconciliationPipelineJob>?
        get() = javaResource.reconciliationPipelineJob().applyValue({ args0 ->
            args0.map({ args0 ->
                args0.let({ args0 -> pipelineJobReconciliationPipelineJobToKotlin(args0) })
            }).orElse(null)
        })

    /**
     * The fully qualified name of this dataset
     */
    public val selfLink: Output<String>
        get() = javaResource.selfLink().applyValue({ args0 -> args0 })
}

/**
 * Maps the underlying Java [com.pulumi.gcp.healthcare.PipelineJob] resource to its
 * Kotlin wrapper [PipelineJob]. Used by the Kotlin SDK to resolve wrapped resources.
 */
public object PipelineJobMapper : ResourceMapper<PipelineJob> {
    override fun supportsMappingOfType(javaResource: Resource): Boolean =
        com.pulumi.gcp.healthcare.PipelineJob::class == javaResource::class

    override fun map(javaResource: Resource): PipelineJob = PipelineJob(
        javaResource as
            com.pulumi.gcp.healthcare.PipelineJob,
    )
}

/**
 * @see [PipelineJob].
 * @param name The _unique_ name of the resulting resource.
 * @param block Builder for [PipelineJob].
 */
public suspend fun pipelineJob(name: String, block: suspend PipelineJobResourceBuilder.() -> Unit): PipelineJob =
    PipelineJobResourceBuilder().let { builder ->
        builder.name(name)
        block(builder)
        builder.build()
    }

/**
 * @see [PipelineJob].
 * @param name The _unique_ name of the resulting resource.
 */
public fun pipelineJob(name: String): PipelineJob =
    PipelineJobResourceBuilder().also { it.name(name) }.build()




© 2015 - 2025 Weber Informatics LLC | Privacy Policy