com.pulumi.azure.datafactory.DatafactoryFunctions Maven / Gradle / Ivy
A Pulumi package for creating and managing Microsoft Azure cloud resources, based on the Terraform azurerm provider. We recommend using the [Azure Native provider](https://github.com/pulumi/pulumi-azure-native) to provision Azure infrastructure. Azure Native provides complete coverage of Azure resources and same-day access to new resources and resource updates.
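To consume this package from a JVM build, depend on the `com.pulumi:azure` artifact. A minimal Maven snippet is sketched below; the version element is a placeholder, substitute whichever published release you want:

    <dependency>
        <groupId>com.pulumi</groupId>
        <artifactId>azure</artifactId>
        <version>REPLACE_WITH_RELEASE</version> <!-- placeholder: pick a published release -->
    </dependency>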
// *** WARNING: this file was generated by pulumi-java-gen. ***
// *** Do not edit by hand unless you're certain you know what you are doing! ***
package com.pulumi.azure.datafactory;
import com.pulumi.azure.Utilities;
import com.pulumi.azure.datafactory.inputs.GetFactoryArgs;
import com.pulumi.azure.datafactory.inputs.GetFactoryPlainArgs;
import com.pulumi.azure.datafactory.inputs.GetTriggerScheduleArgs;
import com.pulumi.azure.datafactory.inputs.GetTriggerSchedulePlainArgs;
import com.pulumi.azure.datafactory.inputs.GetTriggerSchedulesArgs;
import com.pulumi.azure.datafactory.inputs.GetTriggerSchedulesPlainArgs;
import com.pulumi.azure.datafactory.outputs.GetFactoryResult;
import com.pulumi.azure.datafactory.outputs.GetTriggerScheduleResult;
import com.pulumi.azure.datafactory.outputs.GetTriggerSchedulesResult;
import com.pulumi.core.Output;
import com.pulumi.core.TypeShape;
import com.pulumi.deployment.Deployment;
import com.pulumi.deployment.InvokeOptions;
import java.util.concurrent.CompletableFuture;
public final class DatafactoryFunctions {
/**
* Use this data source to access information about an existing Azure Data Factory (Version 2).
*
* ## Example Usage
*
* <!--Start PulumiCodeChooser -->
*
* {@code
* package generated_program;
*
* import com.pulumi.Context;
* import com.pulumi.Pulumi;
* import com.pulumi.core.Output;
* import com.pulumi.azure.datafactory.DatafactoryFunctions;
* import com.pulumi.azure.datafactory.inputs.GetFactoryArgs;
* import java.util.List;
* import java.util.ArrayList;
* import java.util.Map;
* import java.io.File;
* import java.nio.file.Files;
* import java.nio.file.Paths;
*
* public class App {
* public static void main(String[] args) {
* Pulumi.run(App::stack);
* }
*
* public static void stack(Context ctx) {
* final var example = DatafactoryFunctions.getFactory(GetFactoryArgs.builder()
* .name("existing-adf")
* .resourceGroupName("existing-rg")
* .build());
*
* ctx.export("id", example.applyValue(getFactoryResult -> getFactoryResult.id()));
* }
* }
* }
*
* <!--End PulumiCodeChooser -->
*
*/
public static Output<GetFactoryResult> getFactory(GetFactoryArgs args) {
return getFactory(args, InvokeOptions.Empty);
}
/**
* Use this data source to access information about an existing Azure Data Factory (Version 2).
*
* ## Example Usage
*
* <!--Start PulumiCodeChooser -->
*
* {@code
* package generated_program;
*
* import com.pulumi.Context;
* import com.pulumi.Pulumi;
* import com.pulumi.core.Output;
* import com.pulumi.azure.datafactory.DatafactoryFunctions;
* import com.pulumi.azure.datafactory.inputs.GetFactoryArgs;
* import java.util.List;
* import java.util.ArrayList;
* import java.util.Map;
* import java.io.File;
* import java.nio.file.Files;
* import java.nio.file.Paths;
*
* public class App {
* public static void main(String[] args) {
* Pulumi.run(App::stack);
* }
*
* public static void stack(Context ctx) {
* final var example = DatafactoryFunctions.getFactory(GetFactoryArgs.builder()
* .name("existing-adf")
* .resourceGroupName("existing-rg")
* .build());
*
* ctx.export("id", example.applyValue(getFactoryResult -> getFactoryResult.id()));
* }
* }
* }
*
* <!--End PulumiCodeChooser -->
*
*/
public static CompletableFuture<GetFactoryResult> getFactoryPlain(GetFactoryPlainArgs args) {
return getFactoryPlain(args, InvokeOptions.Empty);
}
/**
* Use this data source to access information about an existing Azure Data Factory (Version 2).
*
* ## Example Usage
*
* <!--Start PulumiCodeChooser -->
*
* {@code
* package generated_program;
*
* import com.pulumi.Context;
* import com.pulumi.Pulumi;
* import com.pulumi.core.Output;
* import com.pulumi.azure.datafactory.DatafactoryFunctions;
* import com.pulumi.azure.datafactory.inputs.GetFactoryArgs;
* import java.util.List;
* import java.util.ArrayList;
* import java.util.Map;
* import java.io.File;
* import java.nio.file.Files;
* import java.nio.file.Paths;
*
* public class App {
* public static void main(String[] args) {
* Pulumi.run(App::stack);
* }
*
* public static void stack(Context ctx) {
* final var example = DatafactoryFunctions.getFactory(GetFactoryArgs.builder()
* .name("existing-adf")
* .resourceGroupName("existing-rg")
* .build());
*
* ctx.export("id", example.applyValue(getFactoryResult -> getFactoryResult.id()));
* }
* }
* }
*
* <!--End PulumiCodeChooser -->
*
*/
public static Output<GetFactoryResult> getFactory(GetFactoryArgs args, InvokeOptions options) {
return Deployment.getInstance().invoke("azure:datafactory/getFactory:getFactory", TypeShape.of(GetFactoryResult.class), args, Utilities.withVersion(options));
}
/**
* Use this data source to access information about an existing Azure Data Factory (Version 2).
*
* ## Example Usage
*
* <!--Start PulumiCodeChooser -->
*
* {@code
* package generated_program;
*
* import com.pulumi.Context;
* import com.pulumi.Pulumi;
* import com.pulumi.core.Output;
* import com.pulumi.azure.datafactory.DatafactoryFunctions;
* import com.pulumi.azure.datafactory.inputs.GetFactoryArgs;
* import java.util.List;
* import java.util.ArrayList;
* import java.util.Map;
* import java.io.File;
* import java.nio.file.Files;
* import java.nio.file.Paths;
*
* public class App {
* public static void main(String[] args) {
* Pulumi.run(App::stack);
* }
*
* public static void stack(Context ctx) {
* final var example = DatafactoryFunctions.getFactory(GetFactoryArgs.builder()
* .name("existing-adf")
* .resourceGroupName("existing-rg")
* .build());
*
* ctx.export("id", example.applyValue(getFactoryResult -> getFactoryResult.id()));
* }
* }
* }
*
* <!--End PulumiCodeChooser -->
*
*/
public static CompletableFuture<GetFactoryResult> getFactoryPlain(GetFactoryPlainArgs args, InvokeOptions options) {
return Deployment.getInstance().invokeAsync("azure:datafactory/getFactory:getFactory", TypeShape.of(GetFactoryResult.class), args, Utilities.withVersion(options));
}
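// Note on the four getFactory overloads above: the Output-returning variants compose with other
// Pulumi outputs and resource inputs, while the *Plain variants resolve to a plain Java value
// through a CompletableFuture. A minimal sketch (to be run inside a Pulumi program, reusing the
// placeholder names from the examples above):
//
//   GetFactoryResult factory = DatafactoryFunctions
//       .getFactoryPlain(GetFactoryPlainArgs.builder()
//           .name("existing-adf")
//           .resourceGroupName("existing-rg")
//           .build())
//       .join();                           // blocks until the lookup completes
//   System.out.println(factory.id());     // plain String, no Output wrapper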
/**
* Use this data source to access information about a trigger schedule in Azure Data Factory.
*
* ## Example Usage
*
* <!--Start PulumiCodeChooser -->
*
* {@code
* package generated_program;
*
* import com.pulumi.Context;
* import com.pulumi.Pulumi;
* import com.pulumi.core.Output;
* import com.pulumi.azure.datafactory.DatafactoryFunctions;
* import com.pulumi.azure.datafactory.inputs.GetTriggerScheduleArgs;
* import java.util.List;
* import java.util.ArrayList;
* import java.util.Map;
* import java.io.File;
* import java.nio.file.Files;
* import java.nio.file.Paths;
*
* public class App {
* public static void main(String[] args) {
* Pulumi.run(App::stack);
* }
*
* public static void stack(Context ctx) {
* final var example = DatafactoryFunctions.getTriggerSchedule(GetTriggerScheduleArgs.builder()
* .name("example_trigger")
* .dataFactoryId("/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg1/providers/Microsoft.DataFactory/factories/datafactory1")
* .build());
*
* ctx.export("id", example.applyValue(getTriggerScheduleResult -> getTriggerScheduleResult.id()));
* }
* }
* }
*
* <!--End PulumiCodeChooser -->
*
*/
public static Output<GetTriggerScheduleResult> getTriggerSchedule(GetTriggerScheduleArgs args) {
return getTriggerSchedule(args, InvokeOptions.Empty);
}
/**
* Use this data source to access information about a trigger schedule in Azure Data Factory.
*
* ## Example Usage
*
* <!--Start PulumiCodeChooser -->
*
* {@code
* package generated_program;
*
* import com.pulumi.Context;
* import com.pulumi.Pulumi;
* import com.pulumi.core.Output;
* import com.pulumi.azure.datafactory.DatafactoryFunctions;
* import com.pulumi.azure.datafactory.inputs.GetTriggerScheduleArgs;
* import java.util.List;
* import java.util.ArrayList;
* import java.util.Map;
* import java.io.File;
* import java.nio.file.Files;
* import java.nio.file.Paths;
*
* public class App {
* public static void main(String[] args) {
* Pulumi.run(App::stack);
* }
*
* public static void stack(Context ctx) {
* final var example = DatafactoryFunctions.getTriggerSchedule(GetTriggerScheduleArgs.builder()
* .name("example_trigger")
* .dataFactoryId("/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg1/providers/Microsoft.DataFactory/factories/datafactory1")
* .build());
*
* ctx.export("id", example.applyValue(getTriggerScheduleResult -> getTriggerScheduleResult.id()));
* }
* }
* }
*
* <!--End PulumiCodeChooser -->
*
*/
public static CompletableFuture<GetTriggerScheduleResult> getTriggerSchedulePlain(GetTriggerSchedulePlainArgs args) {
return getTriggerSchedulePlain(args, InvokeOptions.Empty);
}
/**
* Use this data source to access information about a trigger schedule in Azure Data Factory.
*
* ## Example Usage
*
* <!--Start PulumiCodeChooser -->
*
* {@code
* package generated_program;
*
* import com.pulumi.Context;
* import com.pulumi.Pulumi;
* import com.pulumi.core.Output;
* import com.pulumi.azure.datafactory.DatafactoryFunctions;
* import com.pulumi.azure.datafactory.inputs.GetTriggerScheduleArgs;
* import java.util.List;
* import java.util.ArrayList;
* import java.util.Map;
* import java.io.File;
* import java.nio.file.Files;
* import java.nio.file.Paths;
*
* public class App {
* public static void main(String[] args) {
* Pulumi.run(App::stack);
* }
*
* public static void stack(Context ctx) {
* final var example = DatafactoryFunctions.getTriggerSchedule(GetTriggerScheduleArgs.builder()
* .name("example_trigger")
* .dataFactoryId("/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg1/providers/Microsoft.DataFactory/factories/datafactory1")
* .build());
*
* ctx.export("id", example.applyValue(getTriggerScheduleResult -> getTriggerScheduleResult.id()));
* }
* }
* }
*
* <!--End PulumiCodeChooser -->
*
*/
public static Output<GetTriggerScheduleResult> getTriggerSchedule(GetTriggerScheduleArgs args, InvokeOptions options) {
return Deployment.getInstance().invoke("azure:datafactory/getTriggerSchedule:getTriggerSchedule", TypeShape.of(GetTriggerScheduleResult.class), args, Utilities.withVersion(options));
}
/**
* Use this data source to access information about a trigger schedule in Azure Data Factory.
*
* ## Example Usage
*
* <!--Start PulumiCodeChooser -->
*
* {@code
* package generated_program;
*
* import com.pulumi.Context;
* import com.pulumi.Pulumi;
* import com.pulumi.core.Output;
* import com.pulumi.azure.datafactory.DatafactoryFunctions;
* import com.pulumi.azure.datafactory.inputs.GetTriggerScheduleArgs;
* import java.util.List;
* import java.util.ArrayList;
* import java.util.Map;
* import java.io.File;
* import java.nio.file.Files;
* import java.nio.file.Paths;
*
* public class App {
* public static void main(String[] args) {
* Pulumi.run(App::stack);
* }
*
* public static void stack(Context ctx) {
* final var example = DatafactoryFunctions.getTriggerSchedule(GetTriggerScheduleArgs.builder()
* .name("example_trigger")
* .dataFactoryId("/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg1/providers/Microsoft.DataFactory/factories/datafactory1")
* .build());
*
* ctx.export("id", example.applyValue(getTriggerScheduleResult -> getTriggerScheduleResult.id()));
* }
* }
* }
*
* <!--End PulumiCodeChooser -->
*
*/
public static CompletableFuture<GetTriggerScheduleResult> getTriggerSchedulePlain(GetTriggerSchedulePlainArgs args, InvokeOptions options) {
return Deployment.getInstance().invokeAsync("azure:datafactory/getTriggerSchedule:getTriggerSchedule", TypeShape.of(GetTriggerScheduleResult.class), args, Utilities.withVersion(options));
}
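// The trigger lookups take the factory's resource ID, so they chain naturally onto getFactory.
// A sketch reusing the placeholder names from the examples above, and assuming the generated
// builder's Output-accepting setter for dataFactoryId:
//
//   var factory = DatafactoryFunctions.getFactory(GetFactoryArgs.builder()
//       .name("existing-adf")
//       .resourceGroupName("existing-rg")
//       .build());
//   var trigger = DatafactoryFunctions.getTriggerSchedule(GetTriggerScheduleArgs.builder()
//       .name("example_trigger")
//       .dataFactoryId(factory.applyValue(GetFactoryResult::id))
//       .build());
//   ctx.export("triggerId", trigger.applyValue(GetTriggerScheduleResult::id));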
/**
* Use this data source to access information about all existing trigger schedules in Azure Data Factory.
*
* ## Example Usage
*
* <!--Start PulumiCodeChooser -->
*
* {@code
* package generated_program;
*
* import com.pulumi.Context;
* import com.pulumi.Pulumi;
* import com.pulumi.core.Output;
* import com.pulumi.azure.datafactory.DatafactoryFunctions;
* import com.pulumi.azure.datafactory.inputs.GetTriggerSchedulesArgs;
* import java.util.List;
* import java.util.ArrayList;
* import java.util.Map;
* import java.io.File;
* import java.nio.file.Files;
* import java.nio.file.Paths;
*
* public class App {
* public static void main(String[] args) {
* Pulumi.run(App::stack);
* }
*
* public static void stack(Context ctx) {
* final var example = DatafactoryFunctions.getTriggerSchedules(GetTriggerSchedulesArgs.builder()
* .dataFactoryId("/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg1/providers/Microsoft.DataFactory/factories/datafactory1")
* .build());
*
* ctx.export("items", example.applyValue(getTriggerSchedulesResult -> getTriggerSchedulesResult.items()));
* }
* }
* }
*
* <!--End PulumiCodeChooser -->
*
*/
public static Output<GetTriggerSchedulesResult> getTriggerSchedules(GetTriggerSchedulesArgs args) {
return getTriggerSchedules(args, InvokeOptions.Empty);
}
/**
* Use this data source to access information about all existing trigger schedules in Azure Data Factory.
*
* ## Example Usage
*
* <!--Start PulumiCodeChooser -->
*
* {@code
* package generated_program;
*
* import com.pulumi.Context;
* import com.pulumi.Pulumi;
* import com.pulumi.core.Output;
* import com.pulumi.azure.datafactory.DatafactoryFunctions;
* import com.pulumi.azure.datafactory.inputs.GetTriggerSchedulesArgs;
* import java.util.List;
* import java.util.ArrayList;
* import java.util.Map;
* import java.io.File;
* import java.nio.file.Files;
* import java.nio.file.Paths;
*
* public class App {
* public static void main(String[] args) {
* Pulumi.run(App::stack);
* }
*
* public static void stack(Context ctx) {
* final var example = DatafactoryFunctions.getTriggerSchedules(GetTriggerSchedulesArgs.builder()
* .dataFactoryId("/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg1/providers/Microsoft.DataFactory/factories/datafactory1")
* .build());
*
* ctx.export("items", example.applyValue(getTriggerSchedulesResult -> getTriggerSchedulesResult.items()));
* }
* }
* }
*
* <!--End PulumiCodeChooser -->
*
*/
public static CompletableFuture<GetTriggerSchedulesResult> getTriggerSchedulesPlain(GetTriggerSchedulesPlainArgs args) {
return getTriggerSchedulesPlain(args, InvokeOptions.Empty);
}
/**
* Use this data source to access information about all existing trigger schedules in Azure Data Factory.
*
* ## Example Usage
*
* <!--Start PulumiCodeChooser -->
*
* {@code
* package generated_program;
*
* import com.pulumi.Context;
* import com.pulumi.Pulumi;
* import com.pulumi.core.Output;
* import com.pulumi.azure.datafactory.DatafactoryFunctions;
* import com.pulumi.azure.datafactory.inputs.GetTriggerSchedulesArgs;
* import java.util.List;
* import java.util.ArrayList;
* import java.util.Map;
* import java.io.File;
* import java.nio.file.Files;
* import java.nio.file.Paths;
*
* public class App {
* public static void main(String[] args) {
* Pulumi.run(App::stack);
* }
*
* public static void stack(Context ctx) {
* final var example = DatafactoryFunctions.getTriggerSchedules(GetTriggerSchedulesArgs.builder()
* .dataFactoryId("/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg1/providers/Microsoft.DataFactory/factories/datafactory1")
* .build());
*
* ctx.export("items", example.applyValue(getTriggerSchedulesResult -> getTriggerSchedulesResult.items()));
* }
* }
* }
*
* <!--End PulumiCodeChooser -->
*
*/
public static Output<GetTriggerSchedulesResult> getTriggerSchedules(GetTriggerSchedulesArgs args, InvokeOptions options) {
return Deployment.getInstance().invoke("azure:datafactory/getTriggerSchedules:getTriggerSchedules", TypeShape.of(GetTriggerSchedulesResult.class), args, Utilities.withVersion(options));
}
/**
* Use this data source to access information about all existing trigger schedules in Azure Data Factory.
*
* ## Example Usage
*
* <!--Start PulumiCodeChooser -->
*
* {@code
* package generated_program;
*
* import com.pulumi.Context;
* import com.pulumi.Pulumi;
* import com.pulumi.core.Output;
* import com.pulumi.azure.datafactory.DatafactoryFunctions;
* import com.pulumi.azure.datafactory.inputs.GetTriggerSchedulesArgs;
* import java.util.List;
* import java.util.ArrayList;
* import java.util.Map;
* import java.io.File;
* import java.nio.file.Files;
* import java.nio.file.Paths;
*
* public class App {
* public static void main(String[] args) {
* Pulumi.run(App::stack);
* }
*
* public static void stack(Context ctx) {
* final var example = DatafactoryFunctions.getTriggerSchedules(GetTriggerSchedulesArgs.builder()
* .dataFactoryId("/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg1/providers/Microsoft.DataFactory/factories/datafactory1")
* .build());
*
* ctx.export("items", example.applyValue(getTriggerSchedulesResult -> getTriggerSchedulesResult.items()));
* }
* }
* }
*
* <!--End PulumiCodeChooser -->
*
*/
public static CompletableFuture<GetTriggerSchedulesResult> getTriggerSchedulesPlain(GetTriggerSchedulesPlainArgs args, InvokeOptions options) {
return Deployment.getInstance().invokeAsync("azure:datafactory/getTriggerSchedules:getTriggerSchedules", TypeShape.of(GetTriggerSchedulesResult.class), args, Utilities.withVersion(options));
}
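// getTriggerSchedules lists every schedule trigger in a factory; in the upstream azurerm data
// source the result's items() carries the trigger names, which can be fed back into
// getTriggerSchedule individually. A minimal sketch with the placeholder factory ID from the
// example above:
//
//   var all = DatafactoryFunctions.getTriggerSchedules(GetTriggerSchedulesArgs.builder()
//       .dataFactoryId("/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg1/providers/Microsoft.DataFactory/factories/datafactory1")
//       .build());
//   ctx.export("triggerCount", all.applyValue(r -> r.items().size()));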
}