Azure Classic

Pulumi Official
Package maintained by Pulumi
v5.14.0 published on Thursday, Jul 28, 2022 by Pulumi

DataFlow

Manages a Data Flow inside an Azure Data Factory.

Example Usage

using Pulumi;
using Azure = Pulumi.Azure;

// Example stack: provisions a Data Factory Data Flow that copies JSON blobs
// from one dataset to another inside the same storage container.
class MyStack : Stack
{
    public MyStack()
    {
        // Resource group that contains every resource in this example.
        var exampleResourceGroup = new Azure.Core.ResourceGroup("exampleResourceGroup", new Azure.Core.ResourceGroupArgs
        {
            Location = "West Europe",
        });
        // Storage account that backs the blob datasets used by the Data Flow.
        var exampleAccount = new Azure.Storage.Account("exampleAccount", new Azure.Storage.AccountArgs
        {
            Location = exampleResourceGroup.Location,
            ResourceGroupName = exampleResourceGroup.Name,
            AccountTier = "Standard",
            AccountReplicationType = "LRS",
        });
        // Data Factory instance that owns the linked service, datasets and Data Flow.
        var exampleFactory = new Azure.DataFactory.Factory("exampleFactory", new Azure.DataFactory.FactoryArgs
        {
            Location = exampleResourceGroup.Location,
            ResourceGroupName = exampleResourceGroup.Name,
        });
        // Linked service pointing at the storage account. The connection string is
        // only known after the account is created, hence the Apply on the output.
        var exampleLinkedCustomService = new Azure.DataFactory.LinkedCustomService("exampleLinkedCustomService", new Azure.DataFactory.LinkedCustomServiceArgs
        {
            DataFactoryId = exampleFactory.Id,
            Type = "AzureBlobStorage",
            TypePropertiesJson = exampleAccount.PrimaryConnectionString.Apply(primaryConnectionString => @$"{{
  ""connectionString"": ""{primaryConnectionString}""
}}
"),
        });
        // Source dataset: foo.txt in the blob container.
        var example1 = new Azure.DataFactory.DatasetJson("example1", new Azure.DataFactory.DatasetJsonArgs
        {
            DataFactoryId = exampleFactory.Id,
            LinkedServiceName = exampleLinkedCustomService.Name,
            AzureBlobStorageLocation = new Azure.DataFactory.Inputs.DatasetJsonAzureBlobStorageLocationArgs
            {
                Container = "container",
                Path = "foo/bar/",
                Filename = "foo.txt",
            },
            Encoding = "UTF-8",
        });
        // Sink dataset: bar.txt in the same container.
        var example2 = new Azure.DataFactory.DatasetJson("example2", new Azure.DataFactory.DatasetJsonArgs
        {
            DataFactoryId = exampleFactory.Id,
            LinkedServiceName = exampleLinkedCustomService.Name,
            AzureBlobStorageLocation = new Azure.DataFactory.Inputs.DatasetJsonAzureBlobStorageLocationArgs
            {
                Container = "container",
                Path = "foo/bar/",
                Filename = "bar.txt",
            },
            Encoding = "UTF-8",
        });
        // The Data Flow itself: one source (example1), one sink (example2), and a
        // mapping-data-flow script that wires source1 into sink1.
        var exampleDataFlow = new Azure.DataFactory.DataFlow("exampleDataFlow", new Azure.DataFactory.DataFlowArgs
        {
            DataFactoryId = exampleFactory.Id,
            Sources = 
            {
                new Azure.DataFactory.Inputs.DataFlowSourceArgs
                {
                    Name = "source1",
                    Dataset = new Azure.DataFactory.Inputs.DataFlowSourceDatasetArgs
                    {
                        Name = example1.Name,
                    },
                },
            },
            Sinks = 
            {
                new Azure.DataFactory.Inputs.DataFlowSinkArgs
                {
                    Name = "sink1",
                    Dataset = new Azure.DataFactory.Inputs.DataFlowSinkDatasetArgs
                    {
                        Name = example2.Name,
                    },
                },
            },
            Script = @"source(
  allowSchemaDrift: true, 
  validateSchema: false, 
  limit: 100, 
  ignoreNoFilesFound: false, 
  documentForm: 'documentPerLine') ~> source1 
source1 sink(
  allowSchemaDrift: true, 
  validateSchema: false, 
  skipDuplicateMapInputs: true, 
  skipDuplicateMapOutputs: true) ~> sink1
",
        });
    }

}
package main

import (
	"fmt"

	"github.com/pulumi/pulumi-azure/sdk/v5/go/azure/core"
	"github.com/pulumi/pulumi-azure/sdk/v5/go/azure/datafactory"
	"github.com/pulumi/pulumi-azure/sdk/v5/go/azure/storage"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

// main provisions a Data Factory Data Flow that copies JSON blobs from one
// dataset (foo.txt) to another (bar.txt) inside the same storage container.
func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		// Resource group that contains every resource in this example.
		exampleResourceGroup, err := core.NewResourceGroup(ctx, "exampleResourceGroup", &core.ResourceGroupArgs{
			Location: pulumi.String("West Europe"),
		})
		if err != nil {
			return err
		}
		// Storage account that backs the blob datasets used by the Data Flow.
		exampleAccount, err := storage.NewAccount(ctx, "exampleAccount", &storage.AccountArgs{
			Location:               exampleResourceGroup.Location,
			ResourceGroupName:      exampleResourceGroup.Name,
			AccountTier:            pulumi.String("Standard"),
			AccountReplicationType: pulumi.String("LRS"),
		})
		if err != nil {
			return err
		}
		// Data Factory instance that owns the linked service, datasets and Data Flow.
		exampleFactory, err := datafactory.NewFactory(ctx, "exampleFactory", &datafactory.FactoryArgs{
			Location:          exampleResourceGroup.Location,
			ResourceGroupName: exampleResourceGroup.Name,
		})
		if err != nil {
			return err
		}
		// Linked service pointing at the storage account. The connection string is
		// only known after the account is created, hence the ApplyT on the output.
		exampleLinkedCustomService, err := datafactory.NewLinkedCustomService(ctx, "exampleLinkedCustomService", &datafactory.LinkedCustomServiceArgs{
			DataFactoryId: exampleFactory.ID(),
			Type:          pulumi.String("AzureBlobStorage"),
			TypePropertiesJson: exampleAccount.PrimaryConnectionString.ApplyT(func(primaryConnectionString string) (string, error) {
				return fmt.Sprintf("{\n  \"connectionString\": \"%v\"\n}\n", primaryConnectionString), nil
			}).(pulumi.StringOutput),
		})
		if err != nil {
			return err
		}
		// Source dataset: foo.txt in the blob container.
		example1, err := datafactory.NewDatasetJson(ctx, "example1", &datafactory.DatasetJsonArgs{
			DataFactoryId:     exampleFactory.ID(),
			LinkedServiceName: exampleLinkedCustomService.Name,
			AzureBlobStorageLocation: &datafactory.DatasetJsonAzureBlobStorageLocationArgs{
				Container: pulumi.String("container"),
				Path:      pulumi.String("foo/bar/"),
				Filename:  pulumi.String("foo.txt"),
			},
			Encoding: pulumi.String("UTF-8"),
		})
		if err != nil {
			return err
		}
		// Sink dataset: bar.txt in the same container.
		example2, err := datafactory.NewDatasetJson(ctx, "example2", &datafactory.DatasetJsonArgs{
			DataFactoryId:     exampleFactory.ID(),
			LinkedServiceName: exampleLinkedCustomService.Name,
			AzureBlobStorageLocation: &datafactory.DatasetJsonAzureBlobStorageLocationArgs{
				Container: pulumi.String("container"),
				Path:      pulumi.String("foo/bar/"),
				Filename:  pulumi.String("bar.txt"),
			},
			Encoding: pulumi.String("UTF-8"),
		})
		if err != nil {
			return err
		}
		// The Data Flow itself: one source (example1), one sink (example2), and a
		// mapping-data-flow script that wires source1 into sink1. The script is a
		// constant, so no fmt.Sprintf wrapper is needed (staticcheck S1039).
		_, err = datafactory.NewDataFlow(ctx, "exampleDataFlow", &datafactory.DataFlowArgs{
			DataFactoryId: exampleFactory.ID(),
			Sources: datafactory.DataFlowSourceArray{
				&datafactory.DataFlowSourceArgs{
					Name: pulumi.String("source1"),
					Dataset: &datafactory.DataFlowSourceDatasetArgs{
						Name: example1.Name,
					},
				},
			},
			Sinks: datafactory.DataFlowSinkArray{
				&datafactory.DataFlowSinkArgs{
					Name: pulumi.String("sink1"),
					Dataset: &datafactory.DataFlowSinkDatasetArgs{
						Name: example2.Name,
					},
				},
			},
			Script: pulumi.String(`source(
  allowSchemaDrift: true, 
  validateSchema: false, 
  limit: 100, 
  ignoreNoFilesFound: false, 
  documentForm: 'documentPerLine') ~> source1 
source1 sink(
  allowSchemaDrift: true, 
  validateSchema: false, 
  skipDuplicateMapInputs: true, 
  skipDuplicateMapOutputs: true) ~> sink1
`),
		})
		if err != nil {
			return err
		}
		return nil
	})
}
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.azure.core.ResourceGroup;
import com.pulumi.azure.core.ResourceGroupArgs;
import com.pulumi.azure.storage.Account;
import com.pulumi.azure.storage.AccountArgs;
import com.pulumi.azure.datafactory.Factory;
import com.pulumi.azure.datafactory.FactoryArgs;
import com.pulumi.azure.datafactory.LinkedCustomService;
import com.pulumi.azure.datafactory.LinkedCustomServiceArgs;
import com.pulumi.azure.datafactory.DatasetJson;
import com.pulumi.azure.datafactory.DatasetJsonArgs;
import com.pulumi.azure.datafactory.inputs.DatasetJsonAzureBlobStorageLocationArgs;
import com.pulumi.azure.datafactory.DataFlow;
import com.pulumi.azure.datafactory.DataFlowArgs;
import com.pulumi.azure.datafactory.inputs.DataFlowSourceArgs;
import com.pulumi.azure.datafactory.inputs.DataFlowSourceDatasetArgs;
import com.pulumi.azure.datafactory.inputs.DataFlowSinkArgs;
import com.pulumi.azure.datafactory.inputs.DataFlowSinkDatasetArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var exampleResourceGroup = new ResourceGroup("exampleResourceGroup", ResourceGroupArgs.builder()        
            .location("West Europe")
            .build());

        var exampleAccount = new Account("exampleAccount", AccountArgs.builder()        
            .location(exampleResourceGroup.location())
            .resourceGroupName(exampleResourceGroup.name())
            .accountTier("Standard")
            .accountReplicationType("LRS")
            .build());

        var exampleFactory = new Factory("exampleFactory", FactoryArgs.builder()        
            .location(exampleResourceGroup.location())
            .resourceGroupName(exampleResourceGroup.name())
            .build());

        var exampleLinkedCustomService = new LinkedCustomService("exampleLinkedCustomService", LinkedCustomServiceArgs.builder()        
            .dataFactoryId(exampleFactory.id())
            .type("AzureBlobStorage")
            .typePropertiesJson(exampleAccount.primaryConnectionString().applyValue(primaryConnectionString -> """
{
  "connectionString": "%s"
}
", primaryConnectionString)))
            .build());

        var example1 = new DatasetJson("example1", DatasetJsonArgs.builder()        
            .dataFactoryId(exampleFactory.id())
            .linkedServiceName(exampleLinkedCustomService.name())
            .azureBlobStorageLocation(DatasetJsonAzureBlobStorageLocationArgs.builder()
                .container("container")
                .path("foo/bar/")
                .filename("foo.txt")
                .build())
            .encoding("UTF-8")
            .build());

        var example2 = new DatasetJson("example2", DatasetJsonArgs.builder()        
            .dataFactoryId(exampleFactory.id())
            .linkedServiceName(exampleLinkedCustomService.name())
            .azureBlobStorageLocation(DatasetJsonAzureBlobStorageLocationArgs.builder()
                .container("container")
                .path("foo/bar/")
                .filename("bar.txt")
                .build())
            .encoding("UTF-8")
            .build());

        var exampleDataFlow = new DataFlow("exampleDataFlow", DataFlowArgs.builder()        
            .dataFactoryId(exampleFactory.id())
            .sources(DataFlowSourceArgs.builder()
                .name("source1")
                .dataset(DataFlowSourceDatasetArgs.builder()
                    .name(example1.name())
                    .build())
                .build())
            .sinks(DataFlowSinkArgs.builder()
                .name("sink1")
                .dataset(DataFlowSinkDatasetArgs.builder()
                    .name(example2.name())
                    .build())
                .build())
            .script("""
source(
  allowSchemaDrift: true, 
  validateSchema: false, 
  limit: 100, 
  ignoreNoFilesFound: false, 
  documentForm: 'documentPerLine') ~> source1 
source1 sink(
  allowSchemaDrift: true, 
  validateSchema: false, 
  skipDuplicateMapInputs: true, 
  skipDuplicateMapOutputs: true) ~> sink1
            """)
            .build());

    }
}
import pulumi
import pulumi_azure as azure

# Resource group that contains every resource in this example.
example_resource_group = azure.core.ResourceGroup("exampleResourceGroup", location="West Europe")
# Storage account that backs the blob datasets used by the Data Flow.
example_account = azure.storage.Account("exampleAccount",
    location=example_resource_group.location,
    resource_group_name=example_resource_group.name,
    account_tier="Standard",
    account_replication_type="LRS")
# Data Factory instance that owns the linked service, datasets and Data Flow.
example_factory = azure.datafactory.Factory("exampleFactory",
    location=example_resource_group.location,
    resource_group_name=example_resource_group.name)
# Linked service pointing at the storage account. The connection string is only
# known after the account is created, hence the .apply on the output.
example_linked_custom_service = azure.datafactory.LinkedCustomService("exampleLinkedCustomService",
    data_factory_id=example_factory.id,
    type="AzureBlobStorage",
    type_properties_json=example_account.primary_connection_string.apply(lambda primary_connection_string: f"""{{
  "connectionString": "{primary_connection_string}"
}}
"""))
# Source dataset: foo.txt in the blob container.
example1 = azure.datafactory.DatasetJson("example1",
    data_factory_id=example_factory.id,
    linked_service_name=example_linked_custom_service.name,
    azure_blob_storage_location=azure.datafactory.DatasetJsonAzureBlobStorageLocationArgs(
        container="container",
        path="foo/bar/",
        filename="foo.txt",
    ),
    encoding="UTF-8")
# Sink dataset: bar.txt in the same container.
example2 = azure.datafactory.DatasetJson("example2",
    data_factory_id=example_factory.id,
    linked_service_name=example_linked_custom_service.name,
    azure_blob_storage_location=azure.datafactory.DatasetJsonAzureBlobStorageLocationArgs(
        container="container",
        path="foo/bar/",
        filename="bar.txt",
    ),
    encoding="UTF-8")
# The Data Flow itself: one source (example1), one sink (example2), and a
# mapping-data-flow script that wires source1 into sink1.
example_data_flow = azure.datafactory.DataFlow("exampleDataFlow",
    data_factory_id=example_factory.id,
    sources=[azure.datafactory.DataFlowSourceArgs(
        name="source1",
        dataset=azure.datafactory.DataFlowSourceDatasetArgs(
            name=example1.name,
        ),
    )],
    sinks=[azure.datafactory.DataFlowSinkArgs(
        name="sink1",
        dataset=azure.datafactory.DataFlowSinkDatasetArgs(
            name=example2.name,
        ),
    )],
    script="""source(
  allowSchemaDrift: true, 
  validateSchema: false, 
  limit: 100, 
  ignoreNoFilesFound: false, 
  documentForm: 'documentPerLine') ~> source1 
source1 sink(
  allowSchemaDrift: true, 
  validateSchema: false, 
  skipDuplicateMapInputs: true, 
  skipDuplicateMapOutputs: true) ~> sink1
""")
import * as pulumi from "@pulumi/pulumi";
import * as azure from "@pulumi/azure";

// Resource group that contains every resource in this example.
const exampleResourceGroup = new azure.core.ResourceGroup("exampleResourceGroup", {location: "West Europe"});
// Storage account that backs the blob datasets used by the Data Flow.
const exampleAccount = new azure.storage.Account("exampleAccount", {
    location: exampleResourceGroup.location,
    resourceGroupName: exampleResourceGroup.name,
    accountTier: "Standard",
    accountReplicationType: "LRS",
});
// Data Factory instance that owns the linked service, datasets and Data Flow.
const exampleFactory = new azure.datafactory.Factory("exampleFactory", {
    location: exampleResourceGroup.location,
    resourceGroupName: exampleResourceGroup.name,
});
// Linked service pointing at the storage account. The connection string is only
// known after the account is created, hence pulumi.interpolate.
const exampleLinkedCustomService = new azure.datafactory.LinkedCustomService("exampleLinkedCustomService", {
    dataFactoryId: exampleFactory.id,
    type: "AzureBlobStorage",
    typePropertiesJson: pulumi.interpolate`{
  "connectionString": "${exampleAccount.primaryConnectionString}"
}
`,
});
// Source dataset: foo.txt in the blob container.
const example1 = new azure.datafactory.DatasetJson("example1", {
    dataFactoryId: exampleFactory.id,
    linkedServiceName: exampleLinkedCustomService.name,
    azureBlobStorageLocation: {
        container: "container",
        path: "foo/bar/",
        filename: "foo.txt",
    },
    encoding: "UTF-8",
});
// Sink dataset: bar.txt in the same container.
const example2 = new azure.datafactory.DatasetJson("example2", {
    dataFactoryId: exampleFactory.id,
    linkedServiceName: exampleLinkedCustomService.name,
    azureBlobStorageLocation: {
        container: "container",
        path: "foo/bar/",
        filename: "bar.txt",
    },
    encoding: "UTF-8",
});
// The Data Flow itself: one source (example1), one sink (example2), and a
// mapping-data-flow script that wires source1 into sink1.
const exampleDataFlow = new azure.datafactory.DataFlow("exampleDataFlow", {
    dataFactoryId: exampleFactory.id,
    sources: [{
        name: "source1",
        dataset: {
            name: example1.name,
        },
    }],
    sinks: [{
        name: "sink1",
        dataset: {
            name: example2.name,
        },
    }],
    script: `source(
  allowSchemaDrift: true, 
  validateSchema: false, 
  limit: 100, 
  ignoreNoFilesFound: false, 
  documentForm: 'documentPerLine') ~> source1 
source1 sink(
  allowSchemaDrift: true, 
  validateSchema: false, 
  skipDuplicateMapInputs: true, 
  skipDuplicateMapOutputs: true) ~> sink1
`,
});
resources:
  # Resource group that contains every resource in this example.
  exampleResourceGroup:
    type: azure:core:ResourceGroup
    properties:
      location: West Europe
  # Storage account that backs the blob datasets used by the Data Flow.
  exampleAccount:
    type: azure:storage:Account
    properties:
      location: ${exampleResourceGroup.location}
      resourceGroupName: ${exampleResourceGroup.name}
      accountTier: Standard
      accountReplicationType: LRS
  # Data Factory instance that owns the linked service, datasets and Data Flow.
  exampleFactory:
    type: azure:datafactory:Factory
    properties:
      location: ${exampleResourceGroup.location}
      resourceGroupName: ${exampleResourceGroup.name}
  # Linked service pointing at the storage account; the connection string is
  # interpolated into the JSON type properties.
  exampleLinkedCustomService:
    type: azure:datafactory:LinkedCustomService
    properties:
      dataFactoryId: ${exampleFactory.id}
      type: AzureBlobStorage
      typePropertiesJson: |
        {
          "connectionString": "${exampleAccount.primaryConnectionString}"
        }        
  # Source dataset: foo.txt in the blob container.
  example1:
    type: azure:datafactory:DatasetJson
    properties:
      dataFactoryId: ${exampleFactory.id}
      linkedServiceName: ${exampleLinkedCustomService.name}
      azureBlobStorageLocation:
        container: container
        path: foo/bar/
        filename: foo.txt
      encoding: UTF-8
  # Sink dataset: bar.txt in the same container.
  example2:
    type: azure:datafactory:DatasetJson
    properties:
      dataFactoryId: ${exampleFactory.id}
      linkedServiceName: ${exampleLinkedCustomService.name}
      azureBlobStorageLocation:
        container: container
        path: foo/bar/
        filename: bar.txt
      encoding: UTF-8
  # The Data Flow itself: one source (example1), one sink (example2), and a
  # mapping-data-flow script that wires source1 into sink1.
  exampleDataFlow:
    type: azure:datafactory:DataFlow
    properties:
      dataFactoryId: ${exampleFactory.id}
      sources:
        - name: source1
          dataset:
            name: ${example1.name}
      sinks:
        - name: sink1
          dataset:
            name: ${example2.name}
      script: "source(\n  allowSchemaDrift: true, \n  validateSchema: false, \n  limit: 100, \n  ignoreNoFilesFound: false, \n  documentForm: 'documentPerLine') ~> source1 \nsource1 sink(\n  allowSchemaDrift: true, \n  validateSchema: false, \n  skipDuplicateMapInputs: true, \n  skipDuplicateMapOutputs: true) ~> sink1\n"

Create a DataFlow Resource

new DataFlow(name: string, args: DataFlowArgs, opts?: CustomResourceOptions);
@overload
def DataFlow(resource_name: str,
             opts: Optional[ResourceOptions] = None,
             annotations: Optional[Sequence[str]] = None,
             data_factory_id: Optional[str] = None,
             description: Optional[str] = None,
             folder: Optional[str] = None,
             name: Optional[str] = None,
             script: Optional[str] = None,
             script_lines: Optional[Sequence[str]] = None,
             sinks: Optional[Sequence[DataFlowSinkArgs]] = None,
             sources: Optional[Sequence[DataFlowSourceArgs]] = None,
             transformations: Optional[Sequence[DataFlowTransformationArgs]] = None)
@overload
def DataFlow(resource_name: str,
             args: DataFlowArgs,
             opts: Optional[ResourceOptions] = None)
func NewDataFlow(ctx *Context, name string, args DataFlowArgs, opts ...ResourceOption) (*DataFlow, error)
public DataFlow(string name, DataFlowArgs args, CustomResourceOptions? opts = null)
public DataFlow(String name, DataFlowArgs args)
public DataFlow(String name, DataFlowArgs args, CustomResourceOptions options)
type: azure:datafactory:DataFlow
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.

name string
The unique name of the resource.
args DataFlowArgs
The arguments to resource properties.
opts CustomResourceOptions
Bag of options to control resource's behavior.
resource_name str
The unique name of the resource.
args DataFlowArgs
The arguments to resource properties.
opts ResourceOptions
Bag of options to control resource's behavior.
ctx Context
Context object for the current deployment.
name string
The unique name of the resource.
args DataFlowArgs
The arguments to resource properties.
opts ResourceOption
Bag of options to control resource's behavior.
name string
The unique name of the resource.
args DataFlowArgs
The arguments to resource properties.
opts CustomResourceOptions
Bag of options to control resource's behavior.
name String
The unique name of the resource.
args DataFlowArgs
The arguments to resource properties.
options CustomResourceOptions
Bag of options to control resource's behavior.

DataFlow Resource Properties

To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

Inputs

The DataFlow resource accepts the following input properties:

DataFactoryId string

The ID of the Data Factory with which to associate the Data Flow. Changing this forces a new resource.

Sinks List<DataFlowSinkArgs>

One or more sink blocks as defined below.

Sources List<DataFlowSourceArgs>

One or more source blocks as defined below.

Annotations List<string>

List of tags that can be used for describing the Data Factory Data Flow.

Description string

The description for the Data Factory Data Flow.

Folder string

The folder that this Data Flow is in. If not specified, the Data Flow will appear at the root level.

Name string

Specifies the name of the Data Factory Data Flow. Changing this forces a new resource to be created.

Script string

The script for the Data Factory Data Flow.

ScriptLines List<string>

The script lines for the Data Factory Data Flow.

Transformations List<DataFlowTransformationArgs>

One or more transformation blocks as defined below.

DataFactoryId string

The ID of the Data Factory with which to associate the Data Flow. Changing this forces a new resource.

Sinks []DataFlowSinkArgs

One or more sink blocks as defined below.

Sources []DataFlowSourceArgs

One or more source blocks as defined below.

Annotations []string

List of tags that can be used for describing the Data Factory Data Flow.

Description string

The description for the Data Factory Data Flow.

Folder string

The folder that this Data Flow is in. If not specified, the Data Flow will appear at the root level.

Name string

Specifies the name of the Data Factory Data Flow. Changing this forces a new resource to be created.

Script string

The script for the Data Factory Data Flow.

ScriptLines []string

The script lines for the Data Factory Data Flow.

Transformations []DataFlowTransformationArgs

One or more transformation blocks as defined below.

dataFactoryId String

The ID of the Data Factory with which to associate the Data Flow. Changing this forces a new resource.

sinks List<DataFlowSinkArgs>

One or more sink blocks as defined below.

sources List<DataFlowSourceArgs>

One or more source blocks as defined below.

annotations List<String>

List of tags that can be used for describing the Data Factory Data Flow.

description String

The description for the Data Factory Data Flow.

folder String

The folder that this Data Flow is in. If not specified, the Data Flow will appear at the root level.

name String

Specifies the name of the Data Factory Data Flow. Changing this forces a new resource to be created.

script String

The script for the Data Factory Data Flow.

scriptLines List<String>

The script lines for the Data Factory Data Flow.

transformations List<DataFlowTransformationArgs>

One or more transformation blocks as defined below.

dataFactoryId string

The ID of the Data Factory with which to associate the Data Flow. Changing this forces a new resource.

sinks DataFlowSinkArgs[]

One or more sink blocks as defined below.

sources DataFlowSourceArgs[]

One or more source blocks as defined below.

annotations string[]

List of tags that can be used for describing the Data Factory Data Flow.

description string

The description for the Data Factory Data Flow.

folder string

The folder that this Data Flow is in. If not specified, the Data Flow will appear at the root level.

name string

Specifies the name of the Data Factory Data Flow. Changing this forces a new resource to be created.

script string

The script for the Data Factory Data Flow.

scriptLines string[]

The script lines for the Data Factory Data Flow.

transformations DataFlowTransformationArgs[]

One or more transformation blocks as defined below.

data_factory_id str

The ID of the Data Factory with which to associate the Data Flow. Changing this forces a new resource.

sinks Sequence[DataFlowSinkArgs]

One or more sink blocks as defined below.

sources Sequence[DataFlowSourceArgs]

One or more source blocks as defined below.

annotations Sequence[str]

List of tags that can be used for describing the Data Factory Data Flow.

description str

The description for the Data Factory Data Flow.

folder str

The folder that this Data Flow is in. If not specified, the Data Flow will appear at the root level.

name str

Specifies the name of the Data Factory Data Flow. Changing this forces a new resource to be created.

script str

The script for the Data Factory Data Flow.

script_lines Sequence[str]

The script lines for the Data Factory Data Flow.

transformations Sequence[DataFlowTransformationArgs]

One or more transformation blocks as defined below.

dataFactoryId String

The ID of the Data Factory with which to associate the Data Flow. Changing this forces a new resource.

sinks List<Property Map>

One or more sink blocks as defined below.

sources List<Property Map>

One or more source blocks as defined below.

annotations List<String>

List of tags that can be used for describing the Data Factory Data Flow.

description String

The description for the Data Factory Data Flow.

folder String

The folder that this Data Flow is in. If not specified, the Data Flow will appear at the root level.

name String

Specifies the name of the Data Factory Data Flow. Changing this forces a new resource to be created.

script String

The script for the Data Factory Data Flow.

scriptLines List<String>

The script lines for the Data Factory Data Flow.

transformations List<Property Map>

One or more transformation blocks as defined below.

Outputs

All input properties are implicitly available as output properties. Additionally, the DataFlow resource produces the following output properties:

Id string

The provider-assigned unique ID for this managed resource.

Id string

The provider-assigned unique ID for this managed resource.

id String

The provider-assigned unique ID for this managed resource.

id string

The provider-assigned unique ID for this managed resource.

id str

The provider-assigned unique ID for this managed resource.

id String

The provider-assigned unique ID for this managed resource.

Look up an Existing DataFlow Resource

Get an existing DataFlow resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

public static get(name: string, id: Input<ID>, state?: DataFlowState, opts?: CustomResourceOptions): DataFlow
@staticmethod
def get(resource_name: str,
        id: str,
        opts: Optional[ResourceOptions] = None,
        annotations: Optional[Sequence[str]] = None,
        data_factory_id: Optional[str] = None,
        description: Optional[str] = None,
        folder: Optional[str] = None,
        name: Optional[str] = None,
        script: Optional[str] = None,
        script_lines: Optional[Sequence[str]] = None,
        sinks: Optional[Sequence[DataFlowSinkArgs]] = None,
        sources: Optional[Sequence[DataFlowSourceArgs]] = None,
        transformations: Optional[Sequence[DataFlowTransformationArgs]] = None) -> DataFlow
func GetDataFlow(ctx *Context, name string, id IDInput, state *DataFlowState, opts ...ResourceOption) (*DataFlow, error)
public static DataFlow Get(string name, Input<string> id, DataFlowState? state, CustomResourceOptions? opts = null)
public static DataFlow get(String name, Output<String> id, DataFlowState state, CustomResourceOptions options)
Resource lookup is not supported in YAML
name
The unique name of the resulting resource.
id
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
resource_name
The unique name of the resulting resource.
id
The unique provider ID of the resource to lookup.
name
The unique name of the resulting resource.
id
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
name
The unique name of the resulting resource.
id
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
name
The unique name of the resulting resource.
id
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
The following state arguments are supported:
Annotations List<string>

List of tags that can be used for describing the Data Factory Data Flow.

DataFactoryId string

The ID of the Data Factory in which to associate the Data Flow. Changing this forces a new resource to be created.

Description string

The description for the Data Factory Data Flow.

Folder string

The folder that this Data Flow is in. If not specified, the Data Flow will appear at the root level.

Name string

Specifies the name of the Data Factory Data Flow. Changing this forces a new resource to be created.

Script string

The script for the Data Factory Data Flow.

ScriptLines List<string>

The script lines for the Data Factory Data Flow.

Sinks List<DataFlowSinkArgs>

One or more sink blocks as defined below.

Sources List<DataFlowSourceArgs>

One or more source blocks as defined below.

Transformations List<DataFlowTransformationArgs>

One or more transformation blocks as defined below.

Annotations []string

List of tags that can be used for describing the Data Factory Data Flow.

DataFactoryId string

The ID of the Data Factory in which to associate the Data Flow. Changing this forces a new resource to be created.

Description string

The description for the Data Factory Data Flow.

Folder string

The folder that this Data Flow is in. If not specified, the Data Flow will appear at the root level.

Name string

Specifies the name of the Data Factory Data Flow. Changing this forces a new resource to be created.

Script string

The script for the Data Factory Data Flow.

ScriptLines []string

The script lines for the Data Factory Data Flow.

Sinks []DataFlowSinkArgs

One or more sink blocks as defined below.

Sources []DataFlowSourceArgs

One or more source blocks as defined below.

Transformations []DataFlowTransformationArgs

One or more transformation blocks as defined below.

annotations List<String>

List of tags that can be used for describing the Data Factory Data Flow.

dataFactoryId String

The ID of the Data Factory in which to associate the Data Flow. Changing this forces a new resource to be created.

description String

The description for the Data Factory Data Flow.

folder String

The folder that this Data Flow is in. If not specified, the Data Flow will appear at the root level.

name String

Specifies the name of the Data Factory Data Flow. Changing this forces a new resource to be created.

script String

The script for the Data Factory Data Flow.

scriptLines List<String>

The script lines for the Data Factory Data Flow.

sinks List<DataFlowSinkArgs>

One or more sink blocks as defined below.

sources List<DataFlowSourceArgs>

One or more source blocks as defined below.

transformations List<DataFlowTransformationArgs>

One or more transformation blocks as defined below.

annotations string[]

List of tags that can be used for describing the Data Factory Data Flow.

dataFactoryId string

The ID of the Data Factory in which to associate the Data Flow. Changing this forces a new resource to be created.

description string

The description for the Data Factory Data Flow.

folder string

The folder that this Data Flow is in. If not specified, the Data Flow will appear at the root level.

name string

Specifies the name of the Data Factory Data Flow. Changing this forces a new resource to be created.

script string

The script for the Data Factory Data Flow.

scriptLines string[]

The script lines for the Data Factory Data Flow.

sinks DataFlowSinkArgs[]

One or more sink blocks as defined below.

sources DataFlowSourceArgs[]

One or more source blocks as defined below.

transformations DataFlowTransformationArgs[]

One or more transformation blocks as defined below.

annotations Sequence[str]

List of tags that can be used for describing the Data Factory Data Flow.

data_factory_id str

The ID of the Data Factory in which to associate the Data Flow. Changing this forces a new resource to be created.

description str

The description for the Data Factory Data Flow.

folder str

The folder that this Data Flow is in. If not specified, the Data Flow will appear at the root level.

name str

Specifies the name of the Data Factory Data Flow. Changing this forces a new resource to be created.

script str

The script for the Data Factory Data Flow.

script_lines Sequence[str]

The script lines for the Data Factory Data Flow.

sinks Sequence[DataFlowSinkArgs]

One or more sink blocks as defined below.

sources Sequence[DataFlowSourceArgs]

One or more source blocks as defined below.

transformations Sequence[DataFlowTransformationArgs]

One or more transformation blocks as defined below.

annotations List<String>

List of tags that can be used for describing the Data Factory Data Flow.

dataFactoryId String

The ID of the Data Factory in which to associate the Data Flow. Changing this forces a new resource to be created.

description String

The description for the Data Factory Data Flow.

folder String

The folder that this Data Flow is in. If not specified, the Data Flow will appear at the root level.

name String

Specifies the name of the Data Factory Data Flow. Changing this forces a new resource to be created.

script String

The script for the Data Factory Data Flow.

scriptLines List<String>

The script lines for the Data Factory Data Flow.

sinks List<Property Map>

One or more sink blocks as defined below.

sources List<Property Map>

One or more source blocks as defined below.

transformations List<Property Map>

One or more transformation blocks as defined below.

Supporting Types

DataFlowSink

Name string

The name for the Data Flow Sink.

Dataset DataFlowSinkDataset

A dataset block as defined below.

Description string

The description for the Data Flow Sink.

LinkedService DataFlowSinkLinkedService

A linked_service block as defined below.

SchemaLinkedService DataFlowSinkSchemaLinkedService

A schema_linked_service block as defined below.

Name string

The name for the Data Flow Sink.

Dataset DataFlowSinkDataset

A dataset block as defined below.

Description string

The description for the Data Flow Sink.

LinkedService DataFlowSinkLinkedService

A linked_service block as defined below.

SchemaLinkedService DataFlowSinkSchemaLinkedService

A schema_linked_service block as defined below.

name String

The name for the Data Flow Sink.

dataset DataFlowSinkDataset

A dataset block as defined below.

description String

The description for the Data Flow Sink.

linkedService DataFlowSinkLinkedService

A linked_service block as defined below.

schemaLinkedService DataFlowSinkSchemaLinkedService

A schema_linked_service block as defined below.

name string

The name for the Data Flow Sink.

dataset DataFlowSinkDataset

A dataset block as defined below.

description string

The description for the Data Flow Sink.

linkedService DataFlowSinkLinkedService

A linked_service block as defined below.

schemaLinkedService DataFlowSinkSchemaLinkedService

A schema_linked_service block as defined below.

name str

The name for the Data Flow Sink.

dataset DataFlowSinkDataset

A dataset block as defined below.

description str

The description for the Data Flow Sink.

linked_service DataFlowSinkLinkedService

A linked_service block as defined below.

schema_linked_service DataFlowSinkSchemaLinkedService

A schema_linked_service block as defined below.

name String

The name for the Data Flow Sink.

dataset Property Map

A dataset block as defined below.

description String

The description for the Data Flow Sink.

linkedService Property Map

A linked_service block as defined below.

schemaLinkedService Property Map

A schema_linked_service block as defined below.

DataFlowSinkDataset

Name string

The name for the Data Factory Dataset.

Parameters Dictionary<string, string>

A map of parameters to associate with the Data Factory dataset.

Name string

The name for the Data Factory Dataset.

Parameters map[string]string

A map of parameters to associate with the Data Factory dataset.

name String

The name for the Data Factory Dataset.

parameters Map<String,String>

A map of parameters to associate with the Data Factory dataset.

name string

The name for the Data Factory Dataset.

parameters {[key: string]: string}

A map of parameters to associate with the Data Factory dataset.

name str

The name for the Data Factory Dataset.

parameters Mapping[str, str]

A map of parameters to associate with the Data Factory dataset.

name String

The name for the Data Factory Dataset.

parameters Map<String>

A map of parameters to associate with the Data Factory dataset.

DataFlowSinkLinkedService

Name string

The name for the Data Factory Linked Service.

Parameters Dictionary<string, string>

A map of parameters to associate with the Data Factory Linked Service.

Name string

The name for the Data Factory Linked Service.

Parameters map[string]string

A map of parameters to associate with the Data Factory Linked Service.

name String

The name for the Data Factory Linked Service.

parameters Map<String,String>

A map of parameters to associate with the Data Factory Linked Service.

name string

The name for the Data Factory Linked Service.

parameters {[key: string]: string}

A map of parameters to associate with the Data Factory Linked Service.

name str

The name for the Data Factory Linked Service.

parameters Mapping[str, str]

A map of parameters to associate with the Data Factory Linked Service.

name String

The name for the Data Factory Linked Service.

parameters Map<String>

A map of parameters to associate with the Data Factory Linked Service.

DataFlowSinkSchemaLinkedService

Name string

The name for the Data Factory Linked Service with schema.

Parameters Dictionary<string, string>

A map of parameters to associate with the Data Factory Linked Service.

Name string

The name for the Data Factory Linked Service with schema.

Parameters map[string]string

A map of parameters to associate with the Data Factory Linked Service.

name String

The name for the Data Factory Linked Service with schema.

parameters Map<String,String>

A map of parameters to associate with the Data Factory Linked Service.

name string

The name for the Data Factory Linked Service with schema.

parameters {[key: string]: string}

A map of parameters to associate with the Data Factory Linked Service.

name str

The name for the Data Factory Linked Service with schema.

parameters Mapping[str, str]

A map of parameters to associate with the Data Factory Linked Service.

name String

The name for the Data Factory Linked Service with schema.

parameters Map<String>

A map of parameters to associate with the Data Factory Linked Service.

DataFlowSource

Name string

The name for the Data Flow Source.

Dataset DataFlowSourceDataset

A dataset block as defined below.

Description string

The description for the Data Flow Source.

LinkedService DataFlowSourceLinkedService

A linked_service block as defined below.

SchemaLinkedService DataFlowSourceSchemaLinkedService

A schema_linked_service block as defined below.

Name string

The name for the Data Flow Source.

Dataset DataFlowSourceDataset

A dataset block as defined below.

Description string

The description for the Data Flow Source.

LinkedService DataFlowSourceLinkedService

A linked_service block as defined below.

SchemaLinkedService DataFlowSourceSchemaLinkedService

A schema_linked_service block as defined below.

name String

The name for the Data Flow Source.

dataset DataFlowSourceDataset

A dataset block as defined below.

description String

The description for the Data Flow Source.

linkedService DataFlowSourceLinkedService

A linked_service block as defined below.

schemaLinkedService DataFlowSourceSchemaLinkedService

A schema_linked_service block as defined below.

name string

The name for the Data Flow Source.

dataset DataFlowSourceDataset

A dataset block as defined below.

description string

The description for the Data Flow Source.

linkedService DataFlowSourceLinkedService

A linked_service block as defined below.

schemaLinkedService DataFlowSourceSchemaLinkedService

A schema_linked_service block as defined below.

name str

The name for the Data Flow Source.

dataset DataFlowSourceDataset

A dataset block as defined below.

description str

The description for the Data Flow Source.

linked_service DataFlowSourceLinkedService

A linked_service block as defined below.

schema_linked_service DataFlowSourceSchemaLinkedService

A schema_linked_service block as defined below.

name String

The name for the Data Flow Source.

dataset Property Map

A dataset block as defined below.

description String

The description for the Data Flow Source.

linkedService Property Map

A linked_service block as defined below.

schemaLinkedService Property Map

A schema_linked_service block as defined below.

DataFlowSourceDataset

Name string

The name for the Data Factory Dataset.

Parameters Dictionary<string, string>

A map of parameters to associate with the Data Factory dataset.

Name string

The name for the Data Factory Dataset.

Parameters map[string]string

A map of parameters to associate with the Data Factory dataset.

name String

The name for the Data Factory Dataset.

parameters Map<String,String>

A map of parameters to associate with the Data Factory dataset.

name string

The name for the Data Factory Dataset.

parameters {[key: string]: string}

A map of parameters to associate with the Data Factory dataset.

name str

The name for the Data Factory Dataset.

parameters Mapping[str, str]

A map of parameters to associate with the Data Factory dataset.

name String

The name for the Data Factory Dataset.

parameters Map<String>

A map of parameters to associate with the Data Factory dataset.

DataFlowSourceLinkedService

Name string

The name for the Data Factory Linked Service.

Parameters Dictionary<string, string>

A map of parameters to associate with the Data Factory Linked Service.

Name string

The name for the Data Factory Linked Service.

Parameters map[string]string

A map of parameters to associate with the Data Factory Linked Service.

name String

The name for the Data Factory Linked Service.

parameters Map<String,String>

A map of parameters to associate with the Data Factory Linked Service.

name string

The name for the Data Factory Linked Service.

parameters {[key: string]: string}

A map of parameters to associate with the Data Factory Linked Service.

name str

The name for the Data Factory Linked Service.

parameters Mapping[str, str]

A map of parameters to associate with the Data Factory Linked Service.

name String

The name for the Data Factory Linked Service.

parameters Map<String>

A map of parameters to associate with the Data Factory Linked Service.

DataFlowSourceSchemaLinkedService

Name string

The name for the Data Factory Linked Service with schema.

Parameters Dictionary<string, string>

A map of parameters to associate with the Data Factory Linked Service.

Name string

The name for the Data Factory Linked Service with schema.

Parameters map[string]string

A map of parameters to associate with the Data Factory Linked Service.

name String

The name for the Data Factory Linked Service with schema.

parameters Map<String,String>

A map of parameters to associate with the Data Factory Linked Service.

name string

The name for the Data Factory Linked Service with schema.

parameters {[key: string]: string}

A map of parameters to associate with the Data Factory Linked Service.

name str

The name for the Data Factory Linked Service with schema.

parameters Mapping[str, str]

A map of parameters to associate with the Data Factory Linked Service.

name String

The name for the Data Factory Linked Service with schema.

parameters Map<String>

A map of parameters to associate with the Data Factory Linked Service.

DataFlowTransformation

Name string

The name for the Data Flow transformation.

Dataset DataFlowTransformationDataset

A dataset block as defined below.

Description string

The description for the Data Flow transformation.

LinkedService DataFlowTransformationLinkedService

A linked_service block as defined below.

Name string

The name for the Data Flow transformation.

Dataset DataFlowTransformationDataset

A dataset block as defined below.

Description string

The description for the Data Flow transformation.

LinkedService DataFlowTransformationLinkedService

A linked_service block as defined below.

name String

The name for the Data Flow transformation.

dataset DataFlowTransformationDataset

A dataset block as defined below.

description String

The description for the Data Flow transformation.

linkedService DataFlowTransformationLinkedService

A linked_service block as defined below.

name string

The name for the Data Flow transformation.

dataset DataFlowTransformationDataset

A dataset block as defined below.

description string

The description for the Data Flow transformation.

linkedService DataFlowTransformationLinkedService

A linked_service block as defined below.

name str

The name for the Data Flow transformation.

dataset DataFlowTransformationDataset

A dataset block as defined below.

description str

The description for the Data Flow transformation.

linked_service DataFlowTransformationLinkedService

A linked_service block as defined below.

name String

The name for the Data Flow transformation.

dataset Property Map

A dataset block as defined below.

description String

The description for the Data Flow transformation.

linkedService Property Map

A linked_service block as defined below.

DataFlowTransformationDataset

Name string

The name for the Data Factory Dataset.

Parameters Dictionary<string, string>

A map of parameters to associate with the Data Factory dataset.

Name string

The name for the Data Factory Dataset.

Parameters map[string]string

A map of parameters to associate with the Data Factory dataset.

name String

The name for the Data Factory Dataset.

parameters Map<String,String>

A map of parameters to associate with the Data Factory dataset.

name string

The name for the Data Factory Dataset.

parameters {[key: string]: string}

A map of parameters to associate with the Data Factory dataset.

name str

The name for the Data Factory Dataset.

parameters Mapping[str, str]

A map of parameters to associate with the Data Factory dataset.

name String

The name for the Data Factory Dataset.

parameters Map<String>

A map of parameters to associate with the Data Factory dataset.

DataFlowTransformationLinkedService

Name string

The name for the Data Factory Linked Service.

Parameters Dictionary<string, string>

A map of parameters to associate with the Data Factory Linked Service.

Name string

The name for the Data Factory Linked Service.

Parameters map[string]string

A map of parameters to associate with the Data Factory Linked Service.

name String

The name for the Data Factory Linked Service.

parameters Map<String,String>

A map of parameters to associate with the Data Factory Linked Service.

name string

The name for the Data Factory Linked Service.

parameters {[key: string]: string}

A map of parameters to associate with the Data Factory Linked Service.

name str

The name for the Data Factory Linked Service.

parameters Mapping[str, str]

A map of parameters to associate with the Data Factory Linked Service.

name String

The name for the Data Factory Linked Service.

parameters Map<String>

A map of parameters to associate with the Data Factory Linked Service.

Import

Data Factory Data Flow can be imported using the resource id, e.g.

 $ pulumi import azure:datafactory/dataFlow:DataFlow example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/example/providers/Microsoft.DataFactory/factories/example/dataflows/example

Package Details

Repository
https://github.com/pulumi/pulumi-azure
License
Apache-2.0
Notes

This Pulumi package is based on the azurerm Terraform Provider.