We recommend using Azure Native.
azure.datafactory.DataFlow
Explore with Pulumi AI
Manages a Data Flow inside an Azure Data Factory.
Example Usage
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Azure = Pulumi.Azure;
// Example: provision an Azure Data Factory Data Flow whose source and sink each
// reference a flowlet and a JSON dataset, all backed by one Blob Storage linked service.
return await Deployment.RunAsync(() =>
{
// Resource group holding every resource in this example.
var exampleResourceGroup = new Azure.Core.ResourceGroup("exampleResourceGroup", new()
{
Location = "West Europe",
});
// Storage account that the linked service below connects to.
var exampleAccount = new Azure.Storage.Account("exampleAccount", new()
{
Location = exampleResourceGroup.Location,
ResourceGroupName = exampleResourceGroup.Name,
AccountTier = "Standard",
AccountReplicationType = "LRS",
});
// The Data Factory instance that owns the datasets, flowlets and data flow.
var exampleFactory = new Azure.DataFactory.Factory("exampleFactory", new()
{
Location = exampleResourceGroup.Location,
ResourceGroupName = exampleResourceGroup.Name,
});
// Linked service of type AzureBlobStorage; the connection string is resolved
// from the storage account output, hence the Apply().
var exampleLinkedCustomService = new Azure.DataFactory.LinkedCustomService("exampleLinkedCustomService", new()
{
DataFactoryId = exampleFactory.Id,
Type = "AzureBlobStorage",
TypePropertiesJson = exampleAccount.PrimaryConnectionString.Apply(primaryConnectionString => @$"{{
""connectionString"": ""{primaryConnectionString}""
}}
"),
});
// JSON dataset used as the data flow's source.
var example1DatasetJson = new Azure.DataFactory.DatasetJson("example1DatasetJson", new()
{
DataFactoryId = exampleFactory.Id,
LinkedServiceName = exampleLinkedCustomService.Name,
AzureBlobStorageLocation = new Azure.DataFactory.Inputs.DatasetJsonAzureBlobStorageLocationArgs
{
Container = "container",
Path = "foo/bar/",
Filename = "foo.txt",
},
Encoding = "UTF-8",
});
// JSON dataset used as the data flow's sink.
var example2DatasetJson = new Azure.DataFactory.DatasetJson("example2DatasetJson", new()
{
DataFactoryId = exampleFactory.Id,
LinkedServiceName = exampleLinkedCustomService.Name,
AzureBlobStorageLocation = new Azure.DataFactory.Inputs.DatasetJsonAzureBlobStorageLocationArgs
{
Container = "container",
Path = "foo/bar/",
Filename = "bar.txt",
},
Encoding = "UTF-8",
});
// Flowlet referenced from the data flow's source block below.
var example1FlowletDataFlow = new Azure.DataFactory.FlowletDataFlow("example1FlowletDataFlow", new()
{
DataFactoryId = exampleFactory.Id,
Sources = new[]
{
new Azure.DataFactory.Inputs.FlowletDataFlowSourceArgs
{
Name = "source1",
LinkedService = new Azure.DataFactory.Inputs.FlowletDataFlowSourceLinkedServiceArgs
{
Name = exampleLinkedCustomService.Name,
},
},
},
Sinks = new[]
{
new Azure.DataFactory.Inputs.FlowletDataFlowSinkArgs
{
Name = "sink1",
LinkedService = new Azure.DataFactory.Inputs.FlowletDataFlowSinkLinkedServiceArgs
{
Name = exampleLinkedCustomService.Name,
},
},
},
Script = @"source(
allowSchemaDrift: true,
validateSchema: false,
limit: 100,
ignoreNoFilesFound: false,
documentForm: 'documentPerLine') ~> source1
source1 sink(
allowSchemaDrift: true,
validateSchema: false,
skipDuplicateMapInputs: true,
skipDuplicateMapOutputs: true) ~> sink1
",
});
// Flowlet referenced from the data flow's sink block below.
var example2FlowletDataFlow = new Azure.DataFactory.FlowletDataFlow("example2FlowletDataFlow", new()
{
DataFactoryId = exampleFactory.Id,
Sources = new[]
{
new Azure.DataFactory.Inputs.FlowletDataFlowSourceArgs
{
Name = "source1",
LinkedService = new Azure.DataFactory.Inputs.FlowletDataFlowSourceLinkedServiceArgs
{
Name = exampleLinkedCustomService.Name,
},
},
},
Sinks = new[]
{
new Azure.DataFactory.Inputs.FlowletDataFlowSinkArgs
{
Name = "sink1",
LinkedService = new Azure.DataFactory.Inputs.FlowletDataFlowSinkLinkedServiceArgs
{
Name = exampleLinkedCustomService.Name,
},
},
},
Script = @"source(
allowSchemaDrift: true,
validateSchema: false,
limit: 100,
ignoreNoFilesFound: false,
documentForm: 'documentPerLine') ~> source1
source1 sink(
allowSchemaDrift: true,
validateSchema: false,
skipDuplicateMapInputs: true,
skipDuplicateMapOutputs: true) ~> sink1
",
});
// The Data Flow itself: each source/sink names a flowlet (with parameters)
// and one of the JSON datasets created above.
var exampleDataFlow = new Azure.DataFactory.DataFlow("exampleDataFlow", new()
{
DataFactoryId = exampleFactory.Id,
Sources = new[]
{
new Azure.DataFactory.Inputs.DataFlowSourceArgs
{
Name = "source1",
Flowlet = new Azure.DataFactory.Inputs.DataFlowSourceFlowletArgs
{
Name = example1FlowletDataFlow.Name,
Parameters =
{
{ "Key1", "value1" },
},
},
Dataset = new Azure.DataFactory.Inputs.DataFlowSourceDatasetArgs
{
Name = example1DatasetJson.Name,
},
},
},
Sinks = new[]
{
new Azure.DataFactory.Inputs.DataFlowSinkArgs
{
Name = "sink1",
Flowlet = new Azure.DataFactory.Inputs.DataFlowSinkFlowletArgs
{
Name = example2FlowletDataFlow.Name,
Parameters =
{
{ "Key1", "value1" },
},
},
Dataset = new Azure.DataFactory.Inputs.DataFlowSinkDatasetArgs
{
Name = example2DatasetJson.Name,
},
},
},
Script = @"source(
allowSchemaDrift: true,
validateSchema: false,
limit: 100,
ignoreNoFilesFound: false,
documentForm: 'documentPerLine') ~> source1
source1 sink(
allowSchemaDrift: true,
validateSchema: false,
skipDuplicateMapInputs: true,
skipDuplicateMapOutputs: true) ~> sink1
",
});
});
package main
import (
"fmt"
"github.com/pulumi/pulumi-azure/sdk/v5/go/azure/core"
"github.com/pulumi/pulumi-azure/sdk/v5/go/azure/datafactory"
"github.com/pulumi/pulumi-azure/sdk/v5/go/azure/storage"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
// main provisions an Azure Data Factory Data Flow whose source and sink each
// reference a flowlet and a JSON dataset, all backed by one Blob Storage linked service.
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
// Resource group holding every resource in this example.
exampleResourceGroup, err := core.NewResourceGroup(ctx, "exampleResourceGroup", &core.ResourceGroupArgs{
Location: pulumi.String("West Europe"),
})
if err != nil {
return err
}
// Storage account that the linked service below connects to.
exampleAccount, err := storage.NewAccount(ctx, "exampleAccount", &storage.AccountArgs{
Location: exampleResourceGroup.Location,
ResourceGroupName: exampleResourceGroup.Name,
AccountTier: pulumi.String("Standard"),
AccountReplicationType: pulumi.String("LRS"),
})
if err != nil {
return err
}
// The Data Factory instance that owns the datasets, flowlets and data flow.
exampleFactory, err := datafactory.NewFactory(ctx, "exampleFactory", &datafactory.FactoryArgs{
Location: exampleResourceGroup.Location,
ResourceGroupName: exampleResourceGroup.Name,
})
if err != nil {
return err
}
// Linked service of type AzureBlobStorage; the connection string is resolved
// from the storage account output, hence the ApplyT().
exampleLinkedCustomService, err := datafactory.NewLinkedCustomService(ctx, "exampleLinkedCustomService", &datafactory.LinkedCustomServiceArgs{
DataFactoryId: exampleFactory.ID(),
Type: pulumi.String("AzureBlobStorage"),
TypePropertiesJson: exampleAccount.PrimaryConnectionString.ApplyT(func(primaryConnectionString string) (string, error) {
return fmt.Sprintf("{\n \"connectionString\": \"%v\"\n}\n", primaryConnectionString), nil
}).(pulumi.StringOutput),
})
if err != nil {
return err
}
// JSON dataset used as the data flow's source.
example1DatasetJson, err := datafactory.NewDatasetJson(ctx, "example1DatasetJson", &datafactory.DatasetJsonArgs{
DataFactoryId: exampleFactory.ID(),
LinkedServiceName: exampleLinkedCustomService.Name,
AzureBlobStorageLocation: &datafactory.DatasetJsonAzureBlobStorageLocationArgs{
Container: pulumi.String("container"),
Path: pulumi.String("foo/bar/"),
Filename: pulumi.String("foo.txt"),
},
Encoding: pulumi.String("UTF-8"),
})
if err != nil {
return err
}
// JSON dataset used as the data flow's sink.
example2DatasetJson, err := datafactory.NewDatasetJson(ctx, "example2DatasetJson", &datafactory.DatasetJsonArgs{
DataFactoryId: exampleFactory.ID(),
LinkedServiceName: exampleLinkedCustomService.Name,
AzureBlobStorageLocation: &datafactory.DatasetJsonAzureBlobStorageLocationArgs{
Container: pulumi.String("container"),
Path: pulumi.String("foo/bar/"),
Filename: pulumi.String("bar.txt"),
},
Encoding: pulumi.String("UTF-8"),
})
if err != nil {
return err
}
// Flowlet referenced from the data flow's source block below.
example1FlowletDataFlow, err := datafactory.NewFlowletDataFlow(ctx, "example1FlowletDataFlow", &datafactory.FlowletDataFlowArgs{
DataFactoryId: exampleFactory.ID(),
Sources: datafactory.FlowletDataFlowSourceArray{
&datafactory.FlowletDataFlowSourceArgs{
Name: pulumi.String("source1"),
LinkedService: &datafactory.FlowletDataFlowSourceLinkedServiceArgs{
Name: exampleLinkedCustomService.Name,
},
},
},
Sinks: datafactory.FlowletDataFlowSinkArray{
&datafactory.FlowletDataFlowSinkArgs{
Name: pulumi.String("sink1"),
LinkedService: &datafactory.FlowletDataFlowSinkLinkedServiceArgs{
Name: exampleLinkedCustomService.Name,
},
},
},
Script: pulumi.String(`source(
allowSchemaDrift: true,
validateSchema: false,
limit: 100,
ignoreNoFilesFound: false,
documentForm: 'documentPerLine') ~> source1
source1 sink(
allowSchemaDrift: true,
validateSchema: false,
skipDuplicateMapInputs: true,
skipDuplicateMapOutputs: true) ~> sink1
`),
})
if err != nil {
return err
}
// Flowlet referenced from the data flow's sink block below.
example2FlowletDataFlow, err := datafactory.NewFlowletDataFlow(ctx, "example2FlowletDataFlow", &datafactory.FlowletDataFlowArgs{
DataFactoryId: exampleFactory.ID(),
Sources: datafactory.FlowletDataFlowSourceArray{
&datafactory.FlowletDataFlowSourceArgs{
Name: pulumi.String("source1"),
LinkedService: &datafactory.FlowletDataFlowSourceLinkedServiceArgs{
Name: exampleLinkedCustomService.Name,
},
},
},
Sinks: datafactory.FlowletDataFlowSinkArray{
&datafactory.FlowletDataFlowSinkArgs{
Name: pulumi.String("sink1"),
LinkedService: &datafactory.FlowletDataFlowSinkLinkedServiceArgs{
Name: exampleLinkedCustomService.Name,
},
},
},
Script: pulumi.String(`source(
allowSchemaDrift: true,
validateSchema: false,
limit: 100,
ignoreNoFilesFound: false,
documentForm: 'documentPerLine') ~> source1
source1 sink(
allowSchemaDrift: true,
validateSchema: false,
skipDuplicateMapInputs: true,
skipDuplicateMapOutputs: true) ~> sink1
`),
})
if err != nil {
return err
}
// The Data Flow itself: each source/sink names a flowlet (with parameters)
// and one of the JSON datasets created above.
_, err = datafactory.NewDataFlow(ctx, "exampleDataFlow", &datafactory.DataFlowArgs{
DataFactoryId: exampleFactory.ID(),
Sources: datafactory.DataFlowSourceArray{
&datafactory.DataFlowSourceArgs{
Name: pulumi.String("source1"),
Flowlet: &datafactory.DataFlowSourceFlowletArgs{
Name: example1FlowletDataFlow.Name,
Parameters: pulumi.StringMap{
"Key1": pulumi.String("value1"),
},
},
Dataset: &datafactory.DataFlowSourceDatasetArgs{
Name: example1DatasetJson.Name,
},
},
},
Sinks: datafactory.DataFlowSinkArray{
&datafactory.DataFlowSinkArgs{
Name: pulumi.String("sink1"),
Flowlet: &datafactory.DataFlowSinkFlowletArgs{
Name: example2FlowletDataFlow.Name,
Parameters: pulumi.StringMap{
"Key1": pulumi.String("value1"),
},
},
Dataset: &datafactory.DataFlowSinkDatasetArgs{
Name: example2DatasetJson.Name,
},
},
},
Script: pulumi.String(`source(
allowSchemaDrift: true,
validateSchema: false,
limit: 100,
ignoreNoFilesFound: false,
documentForm: 'documentPerLine') ~> source1
source1 sink(
allowSchemaDrift: true,
validateSchema: false,
skipDuplicateMapInputs: true,
skipDuplicateMapOutputs: true) ~> sink1
`),
})
if err != nil {
return err
}
return nil
})
}
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.azure.core.ResourceGroup;
import com.pulumi.azure.core.ResourceGroupArgs;
import com.pulumi.azure.storage.Account;
import com.pulumi.azure.storage.AccountArgs;
import com.pulumi.azure.datafactory.Factory;
import com.pulumi.azure.datafactory.FactoryArgs;
import com.pulumi.azure.datafactory.LinkedCustomService;
import com.pulumi.azure.datafactory.LinkedCustomServiceArgs;
import com.pulumi.azure.datafactory.DatasetJson;
import com.pulumi.azure.datafactory.DatasetJsonArgs;
import com.pulumi.azure.datafactory.inputs.DatasetJsonAzureBlobStorageLocationArgs;
import com.pulumi.azure.datafactory.FlowletDataFlow;
import com.pulumi.azure.datafactory.FlowletDataFlowArgs;
import com.pulumi.azure.datafactory.inputs.FlowletDataFlowSourceArgs;
import com.pulumi.azure.datafactory.inputs.FlowletDataFlowSourceLinkedServiceArgs;
import com.pulumi.azure.datafactory.inputs.FlowletDataFlowSinkArgs;
import com.pulumi.azure.datafactory.inputs.FlowletDataFlowSinkLinkedServiceArgs;
import com.pulumi.azure.datafactory.DataFlow;
import com.pulumi.azure.datafactory.DataFlowArgs;
import com.pulumi.azure.datafactory.inputs.DataFlowSourceArgs;
import com.pulumi.azure.datafactory.inputs.DataFlowSourceFlowletArgs;
import com.pulumi.azure.datafactory.inputs.DataFlowSourceDatasetArgs;
import com.pulumi.azure.datafactory.inputs.DataFlowSinkArgs;
import com.pulumi.azure.datafactory.inputs.DataFlowSinkFlowletArgs;
import com.pulumi.azure.datafactory.inputs.DataFlowSinkDatasetArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var exampleResourceGroup = new ResourceGroup("exampleResourceGroup", ResourceGroupArgs.builder()
.location("West Europe")
.build());
var exampleAccount = new Account("exampleAccount", AccountArgs.builder()
.location(exampleResourceGroup.location())
.resourceGroupName(exampleResourceGroup.name())
.accountTier("Standard")
.accountReplicationType("LRS")
.build());
var exampleFactory = new Factory("exampleFactory", FactoryArgs.builder()
.location(exampleResourceGroup.location())
.resourceGroupName(exampleResourceGroup.name())
.build());
var exampleLinkedCustomService = new LinkedCustomService("exampleLinkedCustomService", LinkedCustomServiceArgs.builder()
.dataFactoryId(exampleFactory.id())
.type("AzureBlobStorage")
.typePropertiesJson(exampleAccount.primaryConnectionString().applyValue(primaryConnectionString -> """
{
"connectionString": "%s"
}
", primaryConnectionString)))
.build());
var example1DatasetJson = new DatasetJson("example1DatasetJson", DatasetJsonArgs.builder()
.dataFactoryId(exampleFactory.id())
.linkedServiceName(exampleLinkedCustomService.name())
.azureBlobStorageLocation(DatasetJsonAzureBlobStorageLocationArgs.builder()
.container("container")
.path("foo/bar/")
.filename("foo.txt")
.build())
.encoding("UTF-8")
.build());
var example2DatasetJson = new DatasetJson("example2DatasetJson", DatasetJsonArgs.builder()
.dataFactoryId(exampleFactory.id())
.linkedServiceName(exampleLinkedCustomService.name())
.azureBlobStorageLocation(DatasetJsonAzureBlobStorageLocationArgs.builder()
.container("container")
.path("foo/bar/")
.filename("bar.txt")
.build())
.encoding("UTF-8")
.build());
var example1FlowletDataFlow = new FlowletDataFlow("example1FlowletDataFlow", FlowletDataFlowArgs.builder()
.dataFactoryId(exampleFactory.id())
.sources(FlowletDataFlowSourceArgs.builder()
.name("source1")
.linkedService(FlowletDataFlowSourceLinkedServiceArgs.builder()
.name(exampleLinkedCustomService.name())
.build())
.build())
.sinks(FlowletDataFlowSinkArgs.builder()
.name("sink1")
.linkedService(FlowletDataFlowSinkLinkedServiceArgs.builder()
.name(exampleLinkedCustomService.name())
.build())
.build())
.script("""
source(
allowSchemaDrift: true,
validateSchema: false,
limit: 100,
ignoreNoFilesFound: false,
documentForm: 'documentPerLine') ~> source1
source1 sink(
allowSchemaDrift: true,
validateSchema: false,
skipDuplicateMapInputs: true,
skipDuplicateMapOutputs: true) ~> sink1
""")
.build());
var example2FlowletDataFlow = new FlowletDataFlow("example2FlowletDataFlow", FlowletDataFlowArgs.builder()
.dataFactoryId(exampleFactory.id())
.sources(FlowletDataFlowSourceArgs.builder()
.name("source1")
.linkedService(FlowletDataFlowSourceLinkedServiceArgs.builder()
.name(exampleLinkedCustomService.name())
.build())
.build())
.sinks(FlowletDataFlowSinkArgs.builder()
.name("sink1")
.linkedService(FlowletDataFlowSinkLinkedServiceArgs.builder()
.name(exampleLinkedCustomService.name())
.build())
.build())
.script("""
source(
allowSchemaDrift: true,
validateSchema: false,
limit: 100,
ignoreNoFilesFound: false,
documentForm: 'documentPerLine') ~> source1
source1 sink(
allowSchemaDrift: true,
validateSchema: false,
skipDuplicateMapInputs: true,
skipDuplicateMapOutputs: true) ~> sink1
""")
.build());
var exampleDataFlow = new DataFlow("exampleDataFlow", DataFlowArgs.builder()
.dataFactoryId(exampleFactory.id())
.sources(DataFlowSourceArgs.builder()
.name("source1")
.flowlet(DataFlowSourceFlowletArgs.builder()
.name(example1FlowletDataFlow.name())
.parameters(Map.of("Key1", "value1"))
.build())
.dataset(DataFlowSourceDatasetArgs.builder()
.name(example1DatasetJson.name())
.build())
.build())
.sinks(DataFlowSinkArgs.builder()
.name("sink1")
.flowlet(DataFlowSinkFlowletArgs.builder()
.name(example2FlowletDataFlow.name())
.parameters(Map.of("Key1", "value1"))
.build())
.dataset(DataFlowSinkDatasetArgs.builder()
.name(example2DatasetJson.name())
.build())
.build())
.script("""
source(
allowSchemaDrift: true,
validateSchema: false,
limit: 100,
ignoreNoFilesFound: false,
documentForm: 'documentPerLine') ~> source1
source1 sink(
allowSchemaDrift: true,
validateSchema: false,
skipDuplicateMapInputs: true,
skipDuplicateMapOutputs: true) ~> sink1
""")
.build());
}
}
import pulumi
import pulumi_azure as azure
# Example: provision an Azure Data Factory Data Flow whose source and sink each
# reference a flowlet and a JSON dataset, all backed by one Blob Storage linked service.
# Resource group holding every resource in this example.
example_resource_group = azure.core.ResourceGroup("exampleResourceGroup", location="West Europe")
# Storage account that the linked service below connects to.
example_account = azure.storage.Account("exampleAccount",
location=example_resource_group.location,
resource_group_name=example_resource_group.name,
account_tier="Standard",
account_replication_type="LRS")
# The Data Factory instance that owns the datasets, flowlets and data flow.
example_factory = azure.datafactory.Factory("exampleFactory",
location=example_resource_group.location,
resource_group_name=example_resource_group.name)
# Linked service of type AzureBlobStorage; the connection string is resolved
# from the storage account output, hence the .apply().
example_linked_custom_service = azure.datafactory.LinkedCustomService("exampleLinkedCustomService",
data_factory_id=example_factory.id,
type="AzureBlobStorage",
type_properties_json=example_account.primary_connection_string.apply(lambda primary_connection_string: f"""{{
"connectionString": "{primary_connection_string}"
}}
"""))
# JSON dataset used as the data flow's source.
example1_dataset_json = azure.datafactory.DatasetJson("example1DatasetJson",
data_factory_id=example_factory.id,
linked_service_name=example_linked_custom_service.name,
azure_blob_storage_location=azure.datafactory.DatasetJsonAzureBlobStorageLocationArgs(
container="container",
path="foo/bar/",
filename="foo.txt",
),
encoding="UTF-8")
# JSON dataset used as the data flow's sink.
example2_dataset_json = azure.datafactory.DatasetJson("example2DatasetJson",
data_factory_id=example_factory.id,
linked_service_name=example_linked_custom_service.name,
azure_blob_storage_location=azure.datafactory.DatasetJsonAzureBlobStorageLocationArgs(
container="container",
path="foo/bar/",
filename="bar.txt",
),
encoding="UTF-8")
# Flowlet referenced from the data flow's source block below.
example1_flowlet_data_flow = azure.datafactory.FlowletDataFlow("example1FlowletDataFlow",
data_factory_id=example_factory.id,
sources=[azure.datafactory.FlowletDataFlowSourceArgs(
name="source1",
linked_service=azure.datafactory.FlowletDataFlowSourceLinkedServiceArgs(
name=example_linked_custom_service.name,
),
)],
sinks=[azure.datafactory.FlowletDataFlowSinkArgs(
name="sink1",
linked_service=azure.datafactory.FlowletDataFlowSinkLinkedServiceArgs(
name=example_linked_custom_service.name,
),
)],
script="""source(
allowSchemaDrift: true,
validateSchema: false,
limit: 100,
ignoreNoFilesFound: false,
documentForm: 'documentPerLine') ~> source1
source1 sink(
allowSchemaDrift: true,
validateSchema: false,
skipDuplicateMapInputs: true,
skipDuplicateMapOutputs: true) ~> sink1
""")
# Flowlet referenced from the data flow's sink block below.
example2_flowlet_data_flow = azure.datafactory.FlowletDataFlow("example2FlowletDataFlow",
data_factory_id=example_factory.id,
sources=[azure.datafactory.FlowletDataFlowSourceArgs(
name="source1",
linked_service=azure.datafactory.FlowletDataFlowSourceLinkedServiceArgs(
name=example_linked_custom_service.name,
),
)],
sinks=[azure.datafactory.FlowletDataFlowSinkArgs(
name="sink1",
linked_service=azure.datafactory.FlowletDataFlowSinkLinkedServiceArgs(
name=example_linked_custom_service.name,
),
)],
script="""source(
allowSchemaDrift: true,
validateSchema: false,
limit: 100,
ignoreNoFilesFound: false,
documentForm: 'documentPerLine') ~> source1
source1 sink(
allowSchemaDrift: true,
validateSchema: false,
skipDuplicateMapInputs: true,
skipDuplicateMapOutputs: true) ~> sink1
""")
# The Data Flow itself: each source/sink names a flowlet (with parameters)
# and one of the JSON datasets created above.
example_data_flow = azure.datafactory.DataFlow("exampleDataFlow",
data_factory_id=example_factory.id,
sources=[azure.datafactory.DataFlowSourceArgs(
name="source1",
flowlet=azure.datafactory.DataFlowSourceFlowletArgs(
name=example1_flowlet_data_flow.name,
parameters={
"Key1": "value1",
},
),
dataset=azure.datafactory.DataFlowSourceDatasetArgs(
name=example1_dataset_json.name,
),
)],
sinks=[azure.datafactory.DataFlowSinkArgs(
name="sink1",
flowlet=azure.datafactory.DataFlowSinkFlowletArgs(
name=example2_flowlet_data_flow.name,
parameters={
"Key1": "value1",
},
),
dataset=azure.datafactory.DataFlowSinkDatasetArgs(
name=example2_dataset_json.name,
),
)],
script="""source(
allowSchemaDrift: true,
validateSchema: false,
limit: 100,
ignoreNoFilesFound: false,
documentForm: 'documentPerLine') ~> source1
source1 sink(
allowSchemaDrift: true,
validateSchema: false,
skipDuplicateMapInputs: true,
skipDuplicateMapOutputs: true) ~> sink1
""")
import * as pulumi from "@pulumi/pulumi";
import * as azure from "@pulumi/azure";
// Example: provision an Azure Data Factory Data Flow whose source and sink each
// reference a flowlet and a JSON dataset, all backed by one Blob Storage linked service.
// Resource group holding every resource in this example.
const exampleResourceGroup = new azure.core.ResourceGroup("exampleResourceGroup", {location: "West Europe"});
// Storage account that the linked service below connects to.
const exampleAccount = new azure.storage.Account("exampleAccount", {
location: exampleResourceGroup.location,
resourceGroupName: exampleResourceGroup.name,
accountTier: "Standard",
accountReplicationType: "LRS",
});
// The Data Factory instance that owns the datasets, flowlets and data flow.
const exampleFactory = new azure.datafactory.Factory("exampleFactory", {
location: exampleResourceGroup.location,
resourceGroupName: exampleResourceGroup.name,
});
// Linked service of type AzureBlobStorage; the connection string is resolved
// from the storage account output via pulumi.interpolate.
const exampleLinkedCustomService = new azure.datafactory.LinkedCustomService("exampleLinkedCustomService", {
dataFactoryId: exampleFactory.id,
type: "AzureBlobStorage",
typePropertiesJson: pulumi.interpolate`{
"connectionString": "${exampleAccount.primaryConnectionString}"
}
`,
});
// JSON dataset used as the data flow's source.
const example1DatasetJson = new azure.datafactory.DatasetJson("example1DatasetJson", {
dataFactoryId: exampleFactory.id,
linkedServiceName: exampleLinkedCustomService.name,
azureBlobStorageLocation: {
container: "container",
path: "foo/bar/",
filename: "foo.txt",
},
encoding: "UTF-8",
});
// JSON dataset used as the data flow's sink.
const example2DatasetJson = new azure.datafactory.DatasetJson("example2DatasetJson", {
dataFactoryId: exampleFactory.id,
linkedServiceName: exampleLinkedCustomService.name,
azureBlobStorageLocation: {
container: "container",
path: "foo/bar/",
filename: "bar.txt",
},
encoding: "UTF-8",
});
// Flowlet referenced from the data flow's source block below.
const example1FlowletDataFlow = new azure.datafactory.FlowletDataFlow("example1FlowletDataFlow", {
dataFactoryId: exampleFactory.id,
sources: [{
name: "source1",
linkedService: {
name: exampleLinkedCustomService.name,
},
}],
sinks: [{
name: "sink1",
linkedService: {
name: exampleLinkedCustomService.name,
},
}],
script: `source(
allowSchemaDrift: true,
validateSchema: false,
limit: 100,
ignoreNoFilesFound: false,
documentForm: 'documentPerLine') ~> source1
source1 sink(
allowSchemaDrift: true,
validateSchema: false,
skipDuplicateMapInputs: true,
skipDuplicateMapOutputs: true) ~> sink1
`,
});
// Flowlet referenced from the data flow's sink block below.
const example2FlowletDataFlow = new azure.datafactory.FlowletDataFlow("example2FlowletDataFlow", {
dataFactoryId: exampleFactory.id,
sources: [{
name: "source1",
linkedService: {
name: exampleLinkedCustomService.name,
},
}],
sinks: [{
name: "sink1",
linkedService: {
name: exampleLinkedCustomService.name,
},
}],
script: `source(
allowSchemaDrift: true,
validateSchema: false,
limit: 100,
ignoreNoFilesFound: false,
documentForm: 'documentPerLine') ~> source1
source1 sink(
allowSchemaDrift: true,
validateSchema: false,
skipDuplicateMapInputs: true,
skipDuplicateMapOutputs: true) ~> sink1
`,
});
// The Data Flow itself: each source/sink names a flowlet (with parameters)
// and one of the JSON datasets created above.
const exampleDataFlow = new azure.datafactory.DataFlow("exampleDataFlow", {
dataFactoryId: exampleFactory.id,
sources: [{
name: "source1",
flowlet: {
name: example1FlowletDataFlow.name,
parameters: {
Key1: "value1",
},
},
dataset: {
name: example1DatasetJson.name,
},
}],
sinks: [{
name: "sink1",
flowlet: {
name: example2FlowletDataFlow.name,
parameters: {
Key1: "value1",
},
},
dataset: {
name: example2DatasetJson.name,
},
}],
script: `source(
allowSchemaDrift: true,
validateSchema: false,
limit: 100,
ignoreNoFilesFound: false,
documentForm: 'documentPerLine') ~> source1
source1 sink(
allowSchemaDrift: true,
validateSchema: false,
skipDuplicateMapInputs: true,
skipDuplicateMapOutputs: true) ~> sink1
`,
});
# Example: provision an Azure Data Factory Data Flow whose source and sink each
# reference a flowlet and a JSON dataset, all backed by one Blob Storage linked service.
resources:
# Resource group holding every resource in this example.
exampleResourceGroup:
type: azure:core:ResourceGroup
properties:
location: West Europe
# Storage account that the linked service below connects to.
exampleAccount:
type: azure:storage:Account
properties:
location: ${exampleResourceGroup.location}
resourceGroupName: ${exampleResourceGroup.name}
accountTier: Standard
accountReplicationType: LRS
# The Data Factory instance that owns the datasets, flowlets and data flow.
exampleFactory:
type: azure:datafactory:Factory
properties:
location: ${exampleResourceGroup.location}
resourceGroupName: ${exampleResourceGroup.name}
# Linked service of type AzureBlobStorage; the connection string is interpolated
# from the storage account output.
exampleLinkedCustomService:
type: azure:datafactory:LinkedCustomService
properties:
dataFactoryId: ${exampleFactory.id}
type: AzureBlobStorage
typePropertiesJson: |
{
"connectionString": "${exampleAccount.primaryConnectionString}"
}
# JSON dataset used as the data flow's source.
example1DatasetJson:
type: azure:datafactory:DatasetJson
properties:
dataFactoryId: ${exampleFactory.id}
linkedServiceName: ${exampleLinkedCustomService.name}
azureBlobStorageLocation:
container: container
path: foo/bar/
filename: foo.txt
encoding: UTF-8
# JSON dataset used as the data flow's sink.
example2DatasetJson:
type: azure:datafactory:DatasetJson
properties:
dataFactoryId: ${exampleFactory.id}
linkedServiceName: ${exampleLinkedCustomService.name}
azureBlobStorageLocation:
container: container
path: foo/bar/
filename: bar.txt
encoding: UTF-8
# The Data Flow itself: each source/sink names a flowlet (with parameters)
# and one of the JSON datasets defined below.
exampleDataFlow:
type: azure:datafactory:DataFlow
properties:
dataFactoryId: ${exampleFactory.id}
sources:
- name: source1
flowlet:
name: ${example1FlowletDataFlow.name}
parameters:
Key1: value1
dataset:
name: ${example1DatasetJson.name}
sinks:
- name: sink1
flowlet:
name: ${example2FlowletDataFlow.name}
parameters:
Key1: value1
dataset:
name: ${example2DatasetJson.name}
script: "source(\n allowSchemaDrift: true, \n validateSchema: false, \n limit: 100, \n ignoreNoFilesFound: false, \n documentForm: 'documentPerLine') ~> source1 \nsource1 sink(\n allowSchemaDrift: true, \n validateSchema: false, \n skipDuplicateMapInputs: true, \n skipDuplicateMapOutputs: true) ~> sink1\n"
# Flowlet referenced from the data flow's source block above.
example1FlowletDataFlow:
type: azure:datafactory:FlowletDataFlow
properties:
dataFactoryId: ${exampleFactory.id}
sources:
- name: source1
linkedService:
name: ${exampleLinkedCustomService.name}
sinks:
- name: sink1
linkedService:
name: ${exampleLinkedCustomService.name}
script: "source(\n allowSchemaDrift: true, \n validateSchema: false, \n limit: 100, \n ignoreNoFilesFound: false, \n documentForm: 'documentPerLine') ~> source1 \nsource1 sink(\n allowSchemaDrift: true, \n validateSchema: false, \n skipDuplicateMapInputs: true, \n skipDuplicateMapOutputs: true) ~> sink1\n"
# Flowlet referenced from the data flow's sink block above.
example2FlowletDataFlow:
type: azure:datafactory:FlowletDataFlow
properties:
dataFactoryId: ${exampleFactory.id}
sources:
- name: source1
linkedService:
name: ${exampleLinkedCustomService.name}
sinks:
- name: sink1
linkedService:
name: ${exampleLinkedCustomService.name}
script: "source(\n allowSchemaDrift: true, \n validateSchema: false, \n limit: 100, \n ignoreNoFilesFound: false, \n documentForm: 'documentPerLine') ~> source1 \nsource1 sink(\n allowSchemaDrift: true, \n validateSchema: false, \n skipDuplicateMapInputs: true, \n skipDuplicateMapOutputs: true) ~> sink1\n"
Create DataFlow Resource
new DataFlow(name: string, args: DataFlowArgs, opts?: CustomResourceOptions);
@overload
def DataFlow(resource_name: str,
opts: Optional[ResourceOptions] = None,
annotations: Optional[Sequence[str]] = None,
data_factory_id: Optional[str] = None,
description: Optional[str] = None,
folder: Optional[str] = None,
name: Optional[str] = None,
script: Optional[str] = None,
script_lines: Optional[Sequence[str]] = None,
sinks: Optional[Sequence[DataFlowSinkArgs]] = None,
sources: Optional[Sequence[DataFlowSourceArgs]] = None,
transformations: Optional[Sequence[DataFlowTransformationArgs]] = None)
@overload
def DataFlow(resource_name: str,
args: DataFlowArgs,
opts: Optional[ResourceOptions] = None)
func NewDataFlow(ctx *Context, name string, args DataFlowArgs, opts ...ResourceOption) (*DataFlow, error)
public DataFlow(string name, DataFlowArgs args, CustomResourceOptions? opts = null)
public DataFlow(String name, DataFlowArgs args)
public DataFlow(String name, DataFlowArgs args, CustomResourceOptions options)
type: azure:datafactory:DataFlow
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args DataFlowArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- resource_name str
- The unique name of the resource.
- args DataFlowArgs
- The arguments to resource properties.
- opts ResourceOptions
- Bag of options to control resource's behavior.
- ctx Context
- Context object for the current deployment.
- name string
- The unique name of the resource.
- args DataFlowArgs
- The arguments to resource properties.
- opts ResourceOption
- Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args DataFlowArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- name String
- The unique name of the resource.
- args DataFlowArgs
- The arguments to resource properties.
- options CustomResourceOptions
- Bag of options to control resource's behavior.
DataFlow Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
The DataFlow resource accepts the following input properties:
- DataFactoryId string
- The ID of the Data Factory in which to associate the Data Flow with. Changing this forces a new resource.
- Sinks List&lt;DataFlowSink&gt;
- One or more sink blocks as defined below.
- Sources List&lt;DataFlowSource&gt;
- One or more source blocks as defined below.
- Annotations List&lt;string&gt;
- List of tags that can be used for describing the Data Factory Data Flow.
- Description string
- The description for the Data Factory Data Flow.
- Folder string
- The folder that this Data Flow is in. If not specified, the Data Flow will appear at the root level.
- Name string
- Specifies the name of the Data Factory Data Flow. Changing this forces a new resource to be created.
- Script string
- The script for the Data Factory Data Flow.
- ScriptLines List&lt;string&gt;
- The script lines for the Data Factory Data Flow.
- Transformations List&lt;DataFlowTransformation&gt;
- One or more transformation blocks as defined below.
- DataFactoryId string
- The ID of the Data Factory in which to associate the Data Flow with. Changing this forces a new resource.
- Sinks []DataFlowSinkArgs
- One or more sink blocks as defined below.
- Sources []DataFlowSourceArgs
- One or more source blocks as defined below.
- Annotations []string
- List of tags that can be used for describing the Data Factory Data Flow.
- Description string
- The description for the Data Factory Data Flow.
- Folder string
- The folder that this Data Flow is in. If not specified, the Data Flow will appear at the root level.
- Name string
- Specifies the name of the Data Factory Data Flow. Changing this forces a new resource to be created.
- Script string
- The script for the Data Factory Data Flow.
- ScriptLines []string
- The script lines for the Data Factory Data Flow.
- Transformations []DataFlowTransformationArgs
- One or more transformation blocks as defined below.
- data
Factory StringId The ID of Data Factory in which to associate the Data Flow with. Changing this forces a new resource.
- sinks
List<Data
Flow Sink> One or more
sink
blocks as defined below.- sources
List<Data
Flow Source> One or more
source
blocks as defined below.- annotations List<String>
List of tags that can be used for describing the Data Factory Data Flow.
- description String
The description for the Data Factory Data Flow.
- folder String
The folder that this Data Flow is in. If not specified, the Data Flow will appear at the root level.
- name String
Specifies the name of the Data Factory Data Flow. Changing this forces a new resource to be created.
- script String
The script for the Data Factory Data Flow.
- script
Lines List<String> The script lines for the Data Factory Data Flow.
- transformations
List<Data
Flow Transformation> One or more
transformation
blocks as defined below.
- data
Factory stringId The ID of Data Factory in which to associate the Data Flow with. Changing this forces a new resource.
- sinks
Data
Flow Sink[] One or more
sink
blocks as defined below.- sources
Data
Flow Source[] One or more
source
blocks as defined below.- annotations string[]
List of tags that can be used for describing the Data Factory Data Flow.
- description string
The description for the Data Factory Data Flow.
- folder string
The folder that this Data Flow is in. If not specified, the Data Flow will appear at the root level.
- name string
Specifies the name of the Data Factory Data Flow. Changing this forces a new resource to be created.
- script string
The script for the Data Factory Data Flow.
- script
Lines string[] The script lines for the Data Factory Data Flow.
- transformations
Data
Flow Transformation[] One or more
transformation
blocks as defined below.
- data_factory_id str
- The ID of the Data Factory in which to associate the Data Flow with. Changing this forces a new resource.
- sinks Sequence[DataFlowSinkArgs]
- One or more sink blocks as defined below.
- sources Sequence[DataFlowSourceArgs]
- One or more source blocks as defined below.
- annotations Sequence[str]
- List of tags that can be used for describing the Data Factory Data Flow.
- description str
- The description for the Data Factory Data Flow.
- folder str
- The folder that this Data Flow is in. If not specified, the Data Flow will appear at the root level.
- name str
- Specifies the name of the Data Factory Data Flow. Changing this forces a new resource to be created.
- script str
- The script for the Data Factory Data Flow.
- script_lines Sequence[str]
- The script lines for the Data Factory Data Flow.
- transformations Sequence[DataFlowTransformationArgs]
- One or more transformation blocks as defined below.
- data
Factory StringId The ID of Data Factory in which to associate the Data Flow with. Changing this forces a new resource.
- sinks List<Property Map>
One or more
sink
blocks as defined below.- sources List<Property Map>
One or more
source
blocks as defined below.- annotations List<String>
List of tags that can be used for describing the Data Factory Data Flow.
- description String
The description for the Data Factory Data Flow.
- folder String
The folder that this Data Flow is in. If not specified, the Data Flow will appear at the root level.
- name String
Specifies the name of the Data Factory Data Flow. Changing this forces a new resource to be created.
- script String
The script for the Data Factory Data Flow.
- script
Lines List<String> The script lines for the Data Factory Data Flow.
- transformations List<Property Map>
One or more
transformation
blocks as defined below.
Outputs
All input properties are implicitly available as output properties. Additionally, the DataFlow resource produces the following output properties:
- Id string
The provider-assigned unique ID for this managed resource.
- Id string
The provider-assigned unique ID for this managed resource.
- id String
The provider-assigned unique ID for this managed resource.
- id string
The provider-assigned unique ID for this managed resource.
- id str
The provider-assigned unique ID for this managed resource.
- id String
The provider-assigned unique ID for this managed resource.
Look up Existing DataFlow Resource
Get an existing DataFlow resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: DataFlowState, opts?: CustomResourceOptions): DataFlow
@staticmethod
def get(resource_name: str,
id: str,
opts: Optional[ResourceOptions] = None,
annotations: Optional[Sequence[str]] = None,
data_factory_id: Optional[str] = None,
description: Optional[str] = None,
folder: Optional[str] = None,
name: Optional[str] = None,
script: Optional[str] = None,
script_lines: Optional[Sequence[str]] = None,
sinks: Optional[Sequence[DataFlowSinkArgs]] = None,
sources: Optional[Sequence[DataFlowSourceArgs]] = None,
transformations: Optional[Sequence[DataFlowTransformationArgs]] = None) -> DataFlow
func GetDataFlow(ctx *Context, name string, id IDInput, state *DataFlowState, opts ...ResourceOption) (*DataFlow, error)
public static DataFlow Get(string name, Input<string> id, DataFlowState? state, CustomResourceOptions? opts = null)
public static DataFlow get(String name, Output<String> id, DataFlowState state, CustomResourceOptions options)
Resource lookup is not supported in YAML
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- resource_name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- Annotations List<string>
List of tags that can be used for describing the Data Factory Data Flow.
- DataFactoryId string
The ID of Data Factory in which to associate the Data Flow with. Changing this forces a new resource.
- Description string
The description for the Data Factory Data Flow.
- Folder string
The folder that this Data Flow is in. If not specified, the Data Flow will appear at the root level.
- Name string
Specifies the name of the Data Factory Data Flow. Changing this forces a new resource to be created.
- Script string
The script for the Data Factory Data Flow.
- ScriptLines List<string>
The script lines for the Data Factory Data Flow.
- Sinks List<DataFlowSink>
One or more
sink
blocks as defined below.
- Sources List<DataFlowSource>
One or more
source
blocks as defined below.
- Transformations List<DataFlowTransformation>
One or more
transformation
blocks as defined below.
- Annotations []string
List of tags that can be used for describing the Data Factory Data Flow.
- DataFactoryId string
The ID of Data Factory in which to associate the Data Flow with. Changing this forces a new resource.
- Description string
The description for the Data Factory Data Flow.
- Folder string
The folder that this Data Flow is in. If not specified, the Data Flow will appear at the root level.
- Name string
Specifies the name of the Data Factory Data Flow. Changing this forces a new resource to be created.
- Script string
The script for the Data Factory Data Flow.
- ScriptLines []string
The script lines for the Data Factory Data Flow.
- Sinks []DataFlowSinkArgs
One or more
sink
blocks as defined below.
- Sources []DataFlowSourceArgs
One or more
source
blocks as defined below.
- Transformations []DataFlowTransformationArgs
One or more
transformation
blocks as defined below.
- annotations List<String>
List of tags that can be used for describing the Data Factory Data Flow.
- dataFactoryId String
The ID of Data Factory in which to associate the Data Flow with. Changing this forces a new resource.
- description String
The description for the Data Factory Data Flow.
- folder String
The folder that this Data Flow is in. If not specified, the Data Flow will appear at the root level.
- name String
Specifies the name of the Data Factory Data Flow. Changing this forces a new resource to be created.
- script String
The script for the Data Factory Data Flow.
- scriptLines List<String>
The script lines for the Data Factory Data Flow.
- sinks List<DataFlowSink>
One or more
sink
blocks as defined below.
- sources List<DataFlowSource>
One or more
source
blocks as defined below.
- transformations List<DataFlowTransformation>
One or more
transformation
blocks as defined below.
- annotations string[]
List of tags that can be used for describing the Data Factory Data Flow.
- dataFactoryId string
The ID of Data Factory in which to associate the Data Flow with. Changing this forces a new resource.
- description string
The description for the Data Factory Data Flow.
- folder string
The folder that this Data Flow is in. If not specified, the Data Flow will appear at the root level.
- name string
Specifies the name of the Data Factory Data Flow. Changing this forces a new resource to be created.
- script string
The script for the Data Factory Data Flow.
- scriptLines string[]
The script lines for the Data Factory Data Flow.
- sinks DataFlowSink[]
One or more
sink
blocks as defined below.
- sources DataFlowSource[]
One or more
source
blocks as defined below.
- transformations DataFlowTransformation[]
One or more
transformation
blocks as defined below.
- annotations Sequence[str]
List of tags that can be used for describing the Data Factory Data Flow.
- data_factory_id str
The ID of Data Factory in which to associate the Data Flow with. Changing this forces a new resource.
- description str
The description for the Data Factory Data Flow.
- folder str
The folder that this Data Flow is in. If not specified, the Data Flow will appear at the root level.
- name str
Specifies the name of the Data Factory Data Flow. Changing this forces a new resource to be created.
- script str
The script for the Data Factory Data Flow.
- script_lines Sequence[str]
The script lines for the Data Factory Data Flow.
- sinks Sequence[DataFlowSinkArgs]
One or more
sink
blocks as defined below.
- sources Sequence[DataFlowSourceArgs]
One or more
source
blocks as defined below.
- transformations Sequence[DataFlowTransformationArgs]
One or more
transformation
blocks as defined below.
- annotations List<String>
List of tags that can be used for describing the Data Factory Data Flow.
- dataFactoryId String
The ID of Data Factory in which to associate the Data Flow with. Changing this forces a new resource.
- description String
The description for the Data Factory Data Flow.
- folder String
The folder that this Data Flow is in. If not specified, the Data Flow will appear at the root level.
- name String
Specifies the name of the Data Factory Data Flow. Changing this forces a new resource to be created.
- script String
The script for the Data Factory Data Flow.
- scriptLines List<String>
The script lines for the Data Factory Data Flow.
- sinks List<Property Map>
One or more
sink
blocks as defined below.
- sources List<Property Map>
One or more
source
blocks as defined below.
- transformations List<Property Map>
One or more
transformation
blocks as defined below.
Supporting Types
DataFlowSink, DataFlowSinkArgs
- Name string
The name for the Data Flow Source.
- Dataset DataFlowSinkDataset
A
dataset
block as defined below.
- Description string
The description for the Data Flow Source.
- Flowlet DataFlowSinkFlowlet
A
flowlet
block as defined below.
- LinkedService DataFlowSinkLinkedService
A
linked_service
block as defined below.
- RejectedLinkedService DataFlowSinkRejectedLinkedService
A
rejected_linked_service
block as defined below.
- SchemaLinkedService DataFlowSinkSchemaLinkedService
A
schema_linked_service
block as defined below.
- Name string
The name for the Data Flow Source.
- Dataset DataFlowSinkDataset
A
dataset
block as defined below.
- Description string
The description for the Data Flow Source.
- Flowlet DataFlowSinkFlowlet
A
flowlet
block as defined below.
- LinkedService DataFlowSinkLinkedService
A
linked_service
block as defined below.
- RejectedLinkedService DataFlowSinkRejectedLinkedService
A
rejected_linked_service
block as defined below.
- SchemaLinkedService DataFlowSinkSchemaLinkedService
A
schema_linked_service
block as defined below.
- name String
The name for the Data Flow Source.
- dataset DataFlowSinkDataset
A
dataset
block as defined below.
- description String
The description for the Data Flow Source.
- flowlet DataFlowSinkFlowlet
A
flowlet
block as defined below.
- linkedService DataFlowSinkLinkedService
A
linked_service
block as defined below.
- rejectedLinkedService DataFlowSinkRejectedLinkedService
A
rejected_linked_service
block as defined below.
- schemaLinkedService DataFlowSinkSchemaLinkedService
A
schema_linked_service
block as defined below.
- name string
The name for the Data Flow Source.
- dataset DataFlowSinkDataset
A
dataset
block as defined below.
- description string
The description for the Data Flow Source.
- flowlet DataFlowSinkFlowlet
A
flowlet
block as defined below.
- linkedService DataFlowSinkLinkedService
A
linked_service
block as defined below.
- rejectedLinkedService DataFlowSinkRejectedLinkedService
A
rejected_linked_service
block as defined below.
- schemaLinkedService DataFlowSinkSchemaLinkedService
A
schema_linked_service
block as defined below.
- name str
The name for the Data Flow Source.
- dataset DataFlowSinkDataset
A
dataset
block as defined below.
- description str
The description for the Data Flow Source.
- flowlet DataFlowSinkFlowlet
A
flowlet
block as defined below.
- linked_service DataFlowSinkLinkedService
A
linked_service
block as defined below.
- rejected_linked_service DataFlowSinkRejectedLinkedService
A
rejected_linked_service
block as defined below.
- schema_linked_service DataFlowSinkSchemaLinkedService
A
schema_linked_service
block as defined below.
- name String
The name for the Data Flow Source.
- dataset Property Map
A
dataset
block as defined below.
- description String
The description for the Data Flow Source.
- flowlet Property Map
A
flowlet
block as defined below.
- linkedService Property Map
A
linked_service
block as defined below.
- rejectedLinkedService Property Map
A
rejected_linked_service
block as defined below.
- schemaLinkedService Property Map
A
schema_linked_service
block as defined below.
DataFlowSinkDataset, DataFlowSinkDatasetArgs
- Name string
The name for the Data Factory Dataset.
- Parameters Dictionary<string, string>
A map of parameters to associate with the Data Factory dataset.
- Name string
The name for the Data Factory Dataset.
- Parameters map[string]string
A map of parameters to associate with the Data Factory dataset.
- name String
The name for the Data Factory Dataset.
- parameters Map<String,String>
A map of parameters to associate with the Data Factory dataset.
- name string
The name for the Data Factory Dataset.
- parameters {[key: string]: string}
A map of parameters to associate with the Data Factory dataset.
- name str
The name for the Data Factory Dataset.
- parameters Mapping[str, str]
A map of parameters to associate with the Data Factory dataset.
- name String
The name for the Data Factory Dataset.
- parameters Map<String>
A map of parameters to associate with the Data Factory dataset.
DataFlowSinkFlowlet, DataFlowSinkFlowletArgs
- Name string
The name for the Data Factory Flowlet.
- DatasetParameters string
Specifies the reference data flow parameters from dataset.
- Parameters Dictionary<string, string>
A map of parameters to associate with the Data Factory Flowlet.
- Name string
The name for the Data Factory Flowlet.
- DatasetParameters string
Specifies the reference data flow parameters from dataset.
- Parameters map[string]string
A map of parameters to associate with the Data Factory Flowlet.
- name String
The name for the Data Factory Flowlet.
- datasetParameters String
Specifies the reference data flow parameters from dataset.
- parameters Map<String,String>
A map of parameters to associate with the Data Factory Flowlet.
- name string
The name for the Data Factory Flowlet.
- datasetParameters string
Specifies the reference data flow parameters from dataset.
- parameters {[key: string]: string}
A map of parameters to associate with the Data Factory Flowlet.
- name str
The name for the Data Factory Flowlet.
- dataset_parameters str
Specifies the reference data flow parameters from dataset.
- parameters Mapping[str, str]
A map of parameters to associate with the Data Factory Flowlet.
- name String
The name for the Data Factory Flowlet.
- datasetParameters String
Specifies the reference data flow parameters from dataset.
- parameters Map<String>
A map of parameters to associate with the Data Factory Flowlet.
DataFlowSinkLinkedService, DataFlowSinkLinkedServiceArgs
- Name string
The name for the Data Factory Linked Service.
- Parameters Dictionary<string, string>
A map of parameters to associate with the Data Factory Linked Service.
- Name string
The name for the Data Factory Linked Service.
- Parameters map[string]string
A map of parameters to associate with the Data Factory Linked Service.
- name String
The name for the Data Factory Linked Service.
- parameters Map<String,String>
A map of parameters to associate with the Data Factory Linked Service.
- name string
The name for the Data Factory Linked Service.
- parameters {[key: string]: string}
A map of parameters to associate with the Data Factory Linked Service.
- name str
The name for the Data Factory Linked Service.
- parameters Mapping[str, str]
A map of parameters to associate with the Data Factory Linked Service.
- name String
The name for the Data Factory Linked Service.
- parameters Map<String>
A map of parameters to associate with the Data Factory Linked Service.
DataFlowSinkRejectedLinkedService, DataFlowSinkRejectedLinkedServiceArgs
- Name string
The name for the Data Factory Linked Service with schema.
- Parameters Dictionary<string, string>
A map of parameters to associate with the Data Factory Linked Service.
- Name string
The name for the Data Factory Linked Service with schema.
- Parameters map[string]string
A map of parameters to associate with the Data Factory Linked Service.
- name String
The name for the Data Factory Linked Service with schema.
- parameters Map<String,String>
A map of parameters to associate with the Data Factory Linked Service.
- name string
The name for the Data Factory Linked Service with schema.
- parameters {[key: string]: string}
A map of parameters to associate with the Data Factory Linked Service.
- name str
The name for the Data Factory Linked Service with schema.
- parameters Mapping[str, str]
A map of parameters to associate with the Data Factory Linked Service.
- name String
The name for the Data Factory Linked Service with schema.
- parameters Map<String>
A map of parameters to associate with the Data Factory Linked Service.
DataFlowSinkSchemaLinkedService, DataFlowSinkSchemaLinkedServiceArgs
- Name string
The name for the Data Factory Linked Service with schema.
- Parameters Dictionary<string, string>
A map of parameters to associate with the Data Factory Linked Service.
- Name string
The name for the Data Factory Linked Service with schema.
- Parameters map[string]string
A map of parameters to associate with the Data Factory Linked Service.
- name String
The name for the Data Factory Linked Service with schema.
- parameters Map<String,String>
A map of parameters to associate with the Data Factory Linked Service.
- name string
The name for the Data Factory Linked Service with schema.
- parameters {[key: string]: string}
A map of parameters to associate with the Data Factory Linked Service.
- name str
The name for the Data Factory Linked Service with schema.
- parameters Mapping[str, str]
A map of parameters to associate with the Data Factory Linked Service.
- name String
The name for the Data Factory Linked Service with schema.
- parameters Map<String>
A map of parameters to associate with the Data Factory Linked Service.
DataFlowSource, DataFlowSourceArgs
- Name string
The name for the Data Flow Source.
- Dataset DataFlowSourceDataset
A
dataset
block as defined below.
- Description string
The description for the Data Flow Source.
- Flowlet DataFlowSourceFlowlet
A
flowlet
block as defined below.
- LinkedService DataFlowSourceLinkedService
A
linked_service
block as defined below.
- RejectedLinkedService DataFlowSourceRejectedLinkedService
A
rejected_linked_service
block as defined below.
- SchemaLinkedService DataFlowSourceSchemaLinkedService
A
schema_linked_service
block as defined below.
- Name string
The name for the Data Flow Source.
- Dataset DataFlowSourceDataset
A
dataset
block as defined below.
- Description string
The description for the Data Flow Source.
- Flowlet DataFlowSourceFlowlet
A
flowlet
block as defined below.
- LinkedService DataFlowSourceLinkedService
A
linked_service
block as defined below.
- RejectedLinkedService DataFlowSourceRejectedLinkedService
A
rejected_linked_service
block as defined below.
- SchemaLinkedService DataFlowSourceSchemaLinkedService
A
schema_linked_service
block as defined below.
- name String
The name for the Data Flow Source.
- dataset DataFlowSourceDataset
A
dataset
block as defined below.
- description String
The description for the Data Flow Source.
- flowlet DataFlowSourceFlowlet
A
flowlet
block as defined below.
- linkedService DataFlowSourceLinkedService
A
linked_service
block as defined below.
- rejectedLinkedService DataFlowSourceRejectedLinkedService
A
rejected_linked_service
block as defined below.
- schemaLinkedService DataFlowSourceSchemaLinkedService
A
schema_linked_service
block as defined below.
- name string
The name for the Data Flow Source.
- dataset DataFlowSourceDataset
A
dataset
block as defined below.
- description string
The description for the Data Flow Source.
- flowlet DataFlowSourceFlowlet
A
flowlet
block as defined below.
- linkedService DataFlowSourceLinkedService
A
linked_service
block as defined below.
- rejectedLinkedService DataFlowSourceRejectedLinkedService
A
rejected_linked_service
block as defined below.
- schemaLinkedService DataFlowSourceSchemaLinkedService
A
schema_linked_service
block as defined below.
- name str
The name for the Data Flow Source.
- dataset DataFlowSourceDataset
A
dataset
block as defined below.
- description str
The description for the Data Flow Source.
- flowlet DataFlowSourceFlowlet
A
flowlet
block as defined below.
- linked_service DataFlowSourceLinkedService
A
linked_service
block as defined below.
- rejected_linked_service DataFlowSourceRejectedLinkedService
A
rejected_linked_service
block as defined below.
- schema_linked_service DataFlowSourceSchemaLinkedService
A
schema_linked_service
block as defined below.
- name String
The name for the Data Flow Source.
- dataset Property Map
A
dataset
block as defined below.
- description String
The description for the Data Flow Source.
- flowlet Property Map
A
flowlet
block as defined below.
- linkedService Property Map
A
linked_service
block as defined below.
- rejectedLinkedService Property Map
A
rejected_linked_service
block as defined below.
- schemaLinkedService Property Map
A
schema_linked_service
block as defined below.
DataFlowSourceDataset, DataFlowSourceDatasetArgs
- Name string
The name for the Data Factory Dataset.
- Parameters Dictionary<string, string>
A map of parameters to associate with the Data Factory dataset.
- Name string
The name for the Data Factory Dataset.
- Parameters map[string]string
A map of parameters to associate with the Data Factory dataset.
- name String
The name for the Data Factory Dataset.
- parameters Map<String,String>
A map of parameters to associate with the Data Factory dataset.
- name string
The name for the Data Factory Dataset.
- parameters {[key: string]: string}
A map of parameters to associate with the Data Factory dataset.
- name str
The name for the Data Factory Dataset.
- parameters Mapping[str, str]
A map of parameters to associate with the Data Factory dataset.
- name String
The name for the Data Factory Dataset.
- parameters Map<String>
A map of parameters to associate with the Data Factory dataset.
DataFlowSourceFlowlet, DataFlowSourceFlowletArgs
- Name string
The name for the Data Factory Flowlet.
- DatasetParameters string
Specifies the reference data flow parameters from dataset.
- Parameters Dictionary<string, string>
A map of parameters to associate with the Data Factory Flowlet.
- Name string
The name for the Data Factory Flowlet.
- DatasetParameters string
Specifies the reference data flow parameters from dataset.
- Parameters map[string]string
A map of parameters to associate with the Data Factory Flowlet.
- name String
The name for the Data Factory Flowlet.
- datasetParameters String
Specifies the reference data flow parameters from dataset.
- parameters Map<String,String>
A map of parameters to associate with the Data Factory Flowlet.
- name string
The name for the Data Factory Flowlet.
- datasetParameters string
Specifies the reference data flow parameters from dataset.
- parameters {[key: string]: string}
A map of parameters to associate with the Data Factory Flowlet.
- name str
The name for the Data Factory Flowlet.
- dataset_parameters str
Specifies the reference data flow parameters from dataset.
- parameters Mapping[str, str]
A map of parameters to associate with the Data Factory Flowlet.
- name String
The name for the Data Factory Flowlet.
- datasetParameters String
Specifies the reference data flow parameters from dataset.
- parameters Map<String>
A map of parameters to associate with the Data Factory Flowlet.
DataFlowSourceLinkedService, DataFlowSourceLinkedServiceArgs
- Name string
The name for the Data Factory Linked Service.
- Parameters Dictionary<string, string>
A map of parameters to associate with the Data Factory Linked Service.
- Name string
The name for the Data Factory Linked Service.
- Parameters map[string]string
A map of parameters to associate with the Data Factory Linked Service.
- name String
The name for the Data Factory Linked Service.
- parameters Map<String,String>
A map of parameters to associate with the Data Factory Linked Service.
- name string
The name for the Data Factory Linked Service.
- parameters {[key: string]: string}
A map of parameters to associate with the Data Factory Linked Service.
- name str
The name for the Data Factory Linked Service.
- parameters Mapping[str, str]
A map of parameters to associate with the Data Factory Linked Service.
- name String
The name for the Data Factory Linked Service.
- parameters Map<String>
A map of parameters to associate with the Data Factory Linked Service.
DataFlowSourceRejectedLinkedService, DataFlowSourceRejectedLinkedServiceArgs
- Name string
The name for the Data Factory Linked Service with schema.
- Parameters Dictionary<string, string>
A map of parameters to associate with the Data Factory Linked Service.
- Name string
The name for the Data Factory Linked Service with schema.
- Parameters map[string]string
A map of parameters to associate with the Data Factory Linked Service.
- name String
The name for the Data Factory Linked Service with schema.
- parameters Map<String,String>
A map of parameters to associate with the Data Factory Linked Service.
- name string
The name for the Data Factory Linked Service with schema.
- parameters {[key: string]: string}
A map of parameters to associate with the Data Factory Linked Service.
- name str
The name for the Data Factory Linked Service with schema.
- parameters Mapping[str, str]
A map of parameters to associate with the Data Factory Linked Service.
- name String
The name for the Data Factory Linked Service with schema.
- parameters Map<String>
A map of parameters to associate with the Data Factory Linked Service.
DataFlowSourceSchemaLinkedService, DataFlowSourceSchemaLinkedServiceArgs
- Name string
The name for the Data Factory Linked Service with schema.
- Parameters Dictionary<string, string>
A map of parameters to associate with the Data Factory Linked Service.
- Name string
The name for the Data Factory Linked Service with schema.
- Parameters map[string]string
A map of parameters to associate with the Data Factory Linked Service.
- name String
The name for the Data Factory Linked Service with schema.
- parameters Map<String,String>
A map of parameters to associate with the Data Factory Linked Service.
- name string
The name for the Data Factory Linked Service with schema.
- parameters {[key: string]: string}
A map of parameters to associate with the Data Factory Linked Service.
- name str
The name for the Data Factory Linked Service with schema.
- parameters Mapping[str, str]
A map of parameters to associate with the Data Factory Linked Service.
- name String
The name for the Data Factory Linked Service with schema.
- parameters Map<String>
A map of parameters to associate with the Data Factory Linked Service.
DataFlowTransformation, DataFlowTransformationArgs
- Name string
The name for the Data Flow transformation.
- Dataset DataFlowTransformationDataset
A
dataset
block as defined below.
- Description string
The description for the Data Flow transformation.
- Flowlet DataFlowTransformationFlowlet
A
flowlet
block as defined below.
- LinkedService DataFlowTransformationLinkedService
A
linked_service
block as defined below.
- Name string
The name for the Data Flow transformation.
- Dataset DataFlowTransformationDataset
A
dataset
block as defined below.
- Description string
The description for the Data Flow transformation.
- Flowlet DataFlowTransformationFlowlet
A
flowlet
block as defined below.
- LinkedService DataFlowTransformationLinkedService
A
linked_service
block as defined below.
- name String
The name for the Data Flow transformation.
- dataset DataFlowTransformationDataset
A
dataset
block as defined below.
- description String
The description for the Data Flow transformation.
- flowlet DataFlowTransformationFlowlet
A
flowlet
block as defined below.
- linkedService DataFlowTransformationLinkedService
A
linked_service
block as defined below.
- name string
The name for the Data Flow transformation.
- dataset DataFlowTransformationDataset
A
dataset
block as defined below.
- description string
The description for the Data Flow transformation.
- flowlet DataFlowTransformationFlowlet
A
flowlet
block as defined below.
- linkedService DataFlowTransformationLinkedService
A
linked_service
block as defined below.
- name str
The name for the Data Flow transformation.
- dataset DataFlowTransformationDataset
A
dataset
block as defined below.
- description str
The description for the Data Flow transformation.
- flowlet DataFlowTransformationFlowlet
A
flowlet
block as defined below.
- linked_service DataFlowTransformationLinkedService
A
linked_service
block as defined below.
- name String
The name for the Data Flow transformation.
- dataset Property Map
A
dataset
block as defined below.
- description String
The description for the Data Flow transformation.
- flowlet Property Map
A
flowlet
block as defined below.
- linkedService Property Map
A
linked_service
block as defined below.
DataFlowTransformationDataset, DataFlowTransformationDatasetArgs
- Name string
The name for the Data Factory Dataset.
- Parameters Dictionary<string, string>
A map of parameters to associate with the Data Factory dataset.
- Name string
The name for the Data Factory Dataset.
- Parameters map[string]string
A map of parameters to associate with the Data Factory dataset.
- name String
The name for the Data Factory Dataset.
- parameters Map<String,String>
A map of parameters to associate with the Data Factory dataset.
- name string
The name for the Data Factory Dataset.
- parameters {[key: string]: string}
A map of parameters to associate with the Data Factory dataset.
- name str
The name for the Data Factory Dataset.
- parameters Mapping[str, str]
A map of parameters to associate with the Data Factory dataset.
- name String
The name for the Data Factory Dataset.
- parameters Map<String>
A map of parameters to associate with the Data Factory dataset.
DataFlowTransformationFlowlet, DataFlowTransformationFlowletArgs
- Name string
The name for the Data Factory Flowlet.
- DatasetParameters string
Specifies the reference data flow parameters from dataset.
- Parameters Dictionary<string, string>
A map of parameters to associate with the Data Factory Flowlet.
- Name string
The name for the Data Factory Flowlet.
- DatasetParameters string
Specifies the reference data flow parameters from dataset.
- Parameters map[string]string
A map of parameters to associate with the Data Factory Flowlet.
- name String
The name for the Data Factory Flowlet.
- datasetParameters String
Specifies the reference data flow parameters from dataset.
- parameters Map<String,String>
A map of parameters to associate with the Data Factory Flowlet.
- name string
The name for the Data Factory Flowlet.
- datasetParameters string
Specifies the reference data flow parameters from dataset.
- parameters {[key: string]: string}
A map of parameters to associate with the Data Factory Flowlet.
- name str
The name for the Data Factory Flowlet.
- dataset_parameters str
Specifies the reference data flow parameters from dataset.
- parameters Mapping[str, str]
A map of parameters to associate with the Data Factory Flowlet.
- name String
The name for the Data Factory Flowlet.
- datasetParameters String
Specifies the reference data flow parameters from dataset.
- parameters Map<String>
A map of parameters to associate with the Data Factory Flowlet.
DataFlowTransformationLinkedService, DataFlowTransformationLinkedServiceArgs
- Name string
The name for the Data Factory Linked Service.
- Parameters Dictionary<string, string>
A map of parameters to associate with the Data Factory Linked Service.
- Name string
The name for the Data Factory Linked Service.
- Parameters map[string]string
A map of parameters to associate with the Data Factory Linked Service.
- name String
The name for the Data Factory Linked Service.
- parameters Map<String,String>
A map of parameters to associate with the Data Factory Linked Service.
- name string
The name for the Data Factory Linked Service.
- parameters {[key: string]: string}
A map of parameters to associate with the Data Factory Linked Service.
- name str
The name for the Data Factory Linked Service.
- parameters Mapping[str, str]
A map of parameters to associate with the Data Factory Linked Service.
- name String
The name for the Data Factory Linked Service.
- parameters Map<String>
A map of parameters to associate with the Data Factory Linked Service.
Import
Data Factory Data Flow can be imported using the resource id, e.g.
$ pulumi import azure:datafactory/dataFlow:DataFlow example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/example/providers/Microsoft.DataFactory/factories/example/dataflows/example
Package Details
- Repository
- Azure Classic pulumi/pulumi-azure
- License
- Apache-2.0
- Notes
This Pulumi package is based on the
azurerm
Terraform Provider.