azure.datafactory.DatasetDelimitedText

Manages an Azure Delimited Text Dataset inside an Azure Data Factory.

Example Usage

C#

using System.Collections.Generic;
using Pulumi;
using Azure = Pulumi.Azure;

return await Deployment.RunAsync(() => 
{
    var exampleResourceGroup = new Azure.Core.ResourceGroup("exampleResourceGroup", new()
    {
        Location = "West Europe",
    });

    var exampleFactory = new Azure.DataFactory.Factory("exampleFactory", new()
    {
        Location = exampleResourceGroup.Location,
        ResourceGroupName = exampleResourceGroup.Name,
    });

    var exampleLinkedServiceWeb = new Azure.DataFactory.LinkedServiceWeb("exampleLinkedServiceWeb", new()
    {
        DataFactoryId = exampleFactory.Id,
        AuthenticationType = "Anonymous",
        Url = "https://www.bing.com",
    });

    var exampleDatasetDelimitedText = new Azure.DataFactory.DatasetDelimitedText("exampleDatasetDelimitedText", new()
    {
        DataFactoryId = exampleFactory.Id,
        LinkedServiceName = exampleLinkedServiceWeb.Name,
        HttpServerLocation = new Azure.DataFactory.Inputs.DatasetDelimitedTextHttpServerLocationArgs
        {
            RelativeUrl = "http://www.bing.com",
            Path = "foo/bar/",
            Filename = "fizz.txt",
        },
        ColumnDelimiter = ",",
        RowDelimiter = "NEW",
        Encoding = "UTF-8",
        QuoteCharacter = "x",
        EscapeCharacter = "f",
        FirstRowAsHeader = true,
        NullValue = "NULL",
    });

});

Go

package main

import (
	"github.com/pulumi/pulumi-azure/sdk/v5/go/azure/core"
	"github.com/pulumi/pulumi-azure/sdk/v5/go/azure/datafactory"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		exampleResourceGroup, err := core.NewResourceGroup(ctx, "exampleResourceGroup", &core.ResourceGroupArgs{
			Location: pulumi.String("West Europe"),
		})
		if err != nil {
			return err
		}
		exampleFactory, err := datafactory.NewFactory(ctx, "exampleFactory", &datafactory.FactoryArgs{
			Location:          exampleResourceGroup.Location,
			ResourceGroupName: exampleResourceGroup.Name,
		})
		if err != nil {
			return err
		}
		exampleLinkedServiceWeb, err := datafactory.NewLinkedServiceWeb(ctx, "exampleLinkedServiceWeb", &datafactory.LinkedServiceWebArgs{
			DataFactoryId:      exampleFactory.ID(),
			AuthenticationType: pulumi.String("Anonymous"),
			Url:                pulumi.String("https://www.bing.com"),
		})
		if err != nil {
			return err
		}
		_, err = datafactory.NewDatasetDelimitedText(ctx, "exampleDatasetDelimitedText", &datafactory.DatasetDelimitedTextArgs{
			DataFactoryId:     exampleFactory.ID(),
			LinkedServiceName: exampleLinkedServiceWeb.Name,
			HttpServerLocation: &datafactory.DatasetDelimitedTextHttpServerLocationArgs{
				RelativeUrl: pulumi.String("http://www.bing.com"),
				Path:        pulumi.String("foo/bar/"),
				Filename:    pulumi.String("fizz.txt"),
			},
			ColumnDelimiter:  pulumi.String(","),
			RowDelimiter:     pulumi.String("NEW"),
			Encoding:         pulumi.String("UTF-8"),
			QuoteCharacter:   pulumi.String("x"),
			EscapeCharacter:  pulumi.String("f"),
			FirstRowAsHeader: pulumi.Bool(true),
			NullValue:        pulumi.String("NULL"),
		})
		if err != nil {
			return err
		}
		return nil
	})
}

Java

package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.azure.core.ResourceGroup;
import com.pulumi.azure.core.ResourceGroupArgs;
import com.pulumi.azure.datafactory.Factory;
import com.pulumi.azure.datafactory.FactoryArgs;
import com.pulumi.azure.datafactory.LinkedServiceWeb;
import com.pulumi.azure.datafactory.LinkedServiceWebArgs;
import com.pulumi.azure.datafactory.DatasetDelimitedText;
import com.pulumi.azure.datafactory.DatasetDelimitedTextArgs;
import com.pulumi.azure.datafactory.inputs.DatasetDelimitedTextHttpServerLocationArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var exampleResourceGroup = new ResourceGroup("exampleResourceGroup", ResourceGroupArgs.builder()        
            .location("West Europe")
            .build());

        var exampleFactory = new Factory("exampleFactory", FactoryArgs.builder()        
            .location(exampleResourceGroup.location())
            .resourceGroupName(exampleResourceGroup.name())
            .build());

        var exampleLinkedServiceWeb = new LinkedServiceWeb("exampleLinkedServiceWeb", LinkedServiceWebArgs.builder()        
            .dataFactoryId(exampleFactory.id())
            .authenticationType("Anonymous")
            .url("https://www.bing.com")
            .build());

        var exampleDatasetDelimitedText = new DatasetDelimitedText("exampleDatasetDelimitedText", DatasetDelimitedTextArgs.builder()        
            .dataFactoryId(exampleFactory.id())
            .linkedServiceName(exampleLinkedServiceWeb.name())
            .httpServerLocation(DatasetDelimitedTextHttpServerLocationArgs.builder()
                .relativeUrl("http://www.bing.com")
                .path("foo/bar/")
                .filename("fizz.txt")
                .build())
            .columnDelimiter(",")
            .rowDelimiter("NEW")
            .encoding("UTF-8")
            .quoteCharacter("x")
            .escapeCharacter("f")
            .firstRowAsHeader(true)
            .nullValue("NULL")
            .build());

    }
}

Python

import pulumi
import pulumi_azure as azure

example_resource_group = azure.core.ResourceGroup("exampleResourceGroup", location="West Europe")
example_factory = azure.datafactory.Factory("exampleFactory",
    location=example_resource_group.location,
    resource_group_name=example_resource_group.name)
example_linked_service_web = azure.datafactory.LinkedServiceWeb("exampleLinkedServiceWeb",
    data_factory_id=example_factory.id,
    authentication_type="Anonymous",
    url="https://www.bing.com")
example_dataset_delimited_text = azure.datafactory.DatasetDelimitedText("exampleDatasetDelimitedText",
    data_factory_id=example_factory.id,
    linked_service_name=example_linked_service_web.name,
    http_server_location=azure.datafactory.DatasetDelimitedTextHttpServerLocationArgs(
        relative_url="http://www.bing.com",
        path="foo/bar/",
        filename="fizz.txt",
    ),
    column_delimiter=",",
    row_delimiter="NEW",
    encoding="UTF-8",
    quote_character="x",
    escape_character="f",
    first_row_as_header=True,
    null_value="NULL")

TypeScript

import * as pulumi from "@pulumi/pulumi";
import * as azure from "@pulumi/azure";

const exampleResourceGroup = new azure.core.ResourceGroup("exampleResourceGroup", {location: "West Europe"});
const exampleFactory = new azure.datafactory.Factory("exampleFactory", {
    location: exampleResourceGroup.location,
    resourceGroupName: exampleResourceGroup.name,
});
const exampleLinkedServiceWeb = new azure.datafactory.LinkedServiceWeb("exampleLinkedServiceWeb", {
    dataFactoryId: exampleFactory.id,
    authenticationType: "Anonymous",
    url: "https://www.bing.com",
});
const exampleDatasetDelimitedText = new azure.datafactory.DatasetDelimitedText("exampleDatasetDelimitedText", {
    dataFactoryId: exampleFactory.id,
    linkedServiceName: exampleLinkedServiceWeb.name,
    httpServerLocation: {
        relativeUrl: "http://www.bing.com",
        path: "foo/bar/",
        filename: "fizz.txt",
    },
    columnDelimiter: ",",
    rowDelimiter: "NEW",
    encoding: "UTF-8",
    quoteCharacter: "x",
    escapeCharacter: "f",
    firstRowAsHeader: true,
    nullValue: "NULL",
});

YAML

resources:
  exampleResourceGroup:
    type: azure:core:ResourceGroup
    properties:
      location: West Europe
  exampleFactory:
    type: azure:datafactory:Factory
    properties:
      location: ${exampleResourceGroup.location}
      resourceGroupName: ${exampleResourceGroup.name}
  exampleLinkedServiceWeb:
    type: azure:datafactory:LinkedServiceWeb
    properties:
      dataFactoryId: ${exampleFactory.id}
      authenticationType: Anonymous
      url: https://www.bing.com
  exampleDatasetDelimitedText:
    type: azure:datafactory:DatasetDelimitedText
    properties:
      dataFactoryId: ${exampleFactory.id}
      linkedServiceName: ${exampleLinkedServiceWeb.name}
      httpServerLocation:
        relativeUrl: http://www.bing.com
        path: foo/bar/
        filename: fizz.txt
      columnDelimiter: ','
      rowDelimiter: NEW
      encoding: UTF-8
      quoteCharacter: x
      escapeCharacter: f
      firstRowAsHeader: true
      nullValue: "NULL"

Create DatasetDelimitedText Resource

new DatasetDelimitedText(name: string, args: DatasetDelimitedTextArgs, opts?: CustomResourceOptions);
@overload
def DatasetDelimitedText(resource_name: str,
                         opts: Optional[ResourceOptions] = None,
                         additional_properties: Optional[Mapping[str, str]] = None,
                         annotations: Optional[Sequence[str]] = None,
                         azure_blob_fs_location: Optional[DatasetDelimitedTextAzureBlobFsLocationArgs] = None,
                         azure_blob_storage_location: Optional[DatasetDelimitedTextAzureBlobStorageLocationArgs] = None,
                         column_delimiter: Optional[str] = None,
                         compression_codec: Optional[str] = None,
                         compression_level: Optional[str] = None,
                         data_factory_id: Optional[str] = None,
                         description: Optional[str] = None,
                         encoding: Optional[str] = None,
                         escape_character: Optional[str] = None,
                         first_row_as_header: Optional[bool] = None,
                         folder: Optional[str] = None,
                         http_server_location: Optional[DatasetDelimitedTextHttpServerLocationArgs] = None,
                         linked_service_name: Optional[str] = None,
                         name: Optional[str] = None,
                         null_value: Optional[str] = None,
                         parameters: Optional[Mapping[str, str]] = None,
                         quote_character: Optional[str] = None,
                         row_delimiter: Optional[str] = None,
                         schema_columns: Optional[Sequence[DatasetDelimitedTextSchemaColumnArgs]] = None)
@overload
def DatasetDelimitedText(resource_name: str,
                         args: DatasetDelimitedTextArgs,
                         opts: Optional[ResourceOptions] = None)
func NewDatasetDelimitedText(ctx *Context, name string, args DatasetDelimitedTextArgs, opts ...ResourceOption) (*DatasetDelimitedText, error)
public DatasetDelimitedText(string name, DatasetDelimitedTextArgs args, CustomResourceOptions? opts = null)
public DatasetDelimitedText(String name, DatasetDelimitedTextArgs args)
public DatasetDelimitedText(String name, DatasetDelimitedTextArgs args, CustomResourceOptions options)
type: azure:datafactory:DatasetDelimitedText
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.

name string
The unique name of the resource.
args DatasetDelimitedTextArgs
The arguments to resource properties.
opts CustomResourceOptions
Bag of options to control resource's behavior.
resource_name str
The unique name of the resource.
args DatasetDelimitedTextArgs
The arguments to resource properties.
opts ResourceOptions
Bag of options to control resource's behavior.
ctx Context
Context object for the current deployment.
name string
The unique name of the resource.
args DatasetDelimitedTextArgs
The arguments to resource properties.
opts ResourceOption
Bag of options to control resource's behavior.
name string
The unique name of the resource.
args DatasetDelimitedTextArgs
The arguments to resource properties.
opts CustomResourceOptions
Bag of options to control resource's behavior.
name String
The unique name of the resource.
args DatasetDelimitedTextArgs
The arguments to resource properties.
options CustomResourceOptions
Bag of options to control resource's behavior.

DatasetDelimitedText Resource Properties

To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

Inputs

The DatasetDelimitedText resource accepts the following input properties:

DataFactoryId string

The ID of the Data Factory in which to create the Dataset. Changing this forces a new resource to be created.

LinkedServiceName string

The name of the Data Factory Linked Service with which to associate the Dataset.

AdditionalProperties Dictionary<string, string>

A map of additional properties to associate with the Data Factory Dataset.

Annotations List<string>

List of tags that can be used for describing the Data Factory Dataset.

AzureBlobFsLocation DatasetDelimitedTextAzureBlobFsLocationArgs

An azure_blob_fs_location block as defined below.

AzureBlobStorageLocation DatasetDelimitedTextAzureBlobStorageLocationArgs

An azure_blob_storage_location block as defined below.

ColumnDelimiter string

The column delimiter. Defaults to ,.

CompressionCodec string

The compression codec used to read/write text files. Valid values are None, bzip2, gzip, deflate, ZipDeflate, TarGzip, Tar, snappy and lz4. Please note these values are case sensitive.

CompressionLevel string

The compression ratio for the Data Factory Dataset. Valid values are Fastest or Optimal. Please note these values are case sensitive.

Description string

The description for the Data Factory Dataset.

Encoding string

The encoding format for the file.

EscapeCharacter string

The escape character. Defaults to \.

FirstRowAsHeader bool

When used as input, treat the first row of data as headers. When used as output, write the headers into the output as the first row of data. Defaults to false.

Folder string

The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.

HttpServerLocation DatasetDelimitedTextHttpServerLocationArgs

A http_server_location block as defined below.

Name string

Specifies the name of the Data Factory Dataset. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.

NullValue string

The null value string. Defaults to an empty string ("").

Parameters Dictionary<string, string>

A map of parameters to associate with the Data Factory Dataset.

QuoteCharacter string

The quote character. Defaults to ".

RowDelimiter string

The row delimiter. On read, defaults to any of \r\n, \r, or \n; on write, defaults to \n for mapping data flows and to \r\n for the Copy activity.

SchemaColumns List<DatasetDelimitedTextSchemaColumnArgs>

A schema_column block as defined below.

DataFactoryId string

The ID of the Data Factory in which to create the Dataset. Changing this forces a new resource to be created.

LinkedServiceName string

The name of the Data Factory Linked Service with which to associate the Dataset.

AdditionalProperties map[string]string

A map of additional properties to associate with the Data Factory Dataset.

Annotations []string

List of tags that can be used for describing the Data Factory Dataset.

AzureBlobFsLocation DatasetDelimitedTextAzureBlobFsLocationArgs

An azure_blob_fs_location block as defined below.

AzureBlobStorageLocation DatasetDelimitedTextAzureBlobStorageLocationArgs

An azure_blob_storage_location block as defined below.

ColumnDelimiter string

The column delimiter. Defaults to ,.

CompressionCodec string

The compression codec used to read/write text files. Valid values are None, bzip2, gzip, deflate, ZipDeflate, TarGzip, Tar, snappy and lz4. Please note these values are case sensitive.

CompressionLevel string

The compression ratio for the Data Factory Dataset. Valid values are Fastest or Optimal. Please note these values are case sensitive.

Description string

The description for the Data Factory Dataset.

Encoding string

The encoding format for the file.

EscapeCharacter string

The escape character. Defaults to \.

FirstRowAsHeader bool

When used as input, treat the first row of data as headers. When used as output, write the headers into the output as the first row of data. Defaults to false.

Folder string

The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.

HttpServerLocation DatasetDelimitedTextHttpServerLocationArgs

A http_server_location block as defined below.

Name string

Specifies the name of the Data Factory Dataset. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.

NullValue string

The null value string. Defaults to an empty string ("").

Parameters map[string]string

A map of parameters to associate with the Data Factory Dataset.

QuoteCharacter string

The quote character. Defaults to ".

RowDelimiter string

The row delimiter. On read, defaults to any of \r\n, \r, or \n; on write, defaults to \n for mapping data flows and to \r\n for the Copy activity.

SchemaColumns []DatasetDelimitedTextSchemaColumnArgs

A schema_column block as defined below.

dataFactoryId String

The ID of the Data Factory in which to create the Dataset. Changing this forces a new resource to be created.

linkedServiceName String

The name of the Data Factory Linked Service with which to associate the Dataset.

additionalProperties Map<String,String>

A map of additional properties to associate with the Data Factory Dataset.

annotations List<String>

List of tags that can be used for describing the Data Factory Dataset.

azureBlobFsLocation DatasetDelimitedTextAzureBlobFsLocationArgs

An azure_blob_fs_location block as defined below.

azureBlobStorageLocation DatasetDelimitedTextAzureBlobStorageLocationArgs

An azure_blob_storage_location block as defined below.

columnDelimiter String

The column delimiter. Defaults to ,.

compressionCodec String

The compression codec used to read/write text files. Valid values are None, bzip2, gzip, deflate, ZipDeflate, TarGzip, Tar, snappy and lz4. Please note these values are case sensitive.

compressionLevel String

The compression ratio for the Data Factory Dataset. Valid values are Fastest or Optimal. Please note these values are case sensitive.

description String

The description for the Data Factory Dataset.

encoding String

The encoding format for the file.

escapeCharacter String

The escape character. Defaults to \.

firstRowAsHeader Boolean

When used as input, treat the first row of data as headers. When used as output, write the headers into the output as the first row of data. Defaults to false.

folder String

The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.

httpServerLocation DatasetDelimitedTextHttpServerLocationArgs

A http_server_location block as defined below.

name String

Specifies the name of the Data Factory Dataset. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.

nullValue String

The null value string. Defaults to an empty string ("").

parameters Map<String,String>

A map of parameters to associate with the Data Factory Dataset.

quoteCharacter String

The quote character. Defaults to ".

rowDelimiter String

The row delimiter. On read, defaults to any of \r\n, \r, or \n; on write, defaults to \n for mapping data flows and to \r\n for the Copy activity.

schemaColumns List<DatasetDelimitedTextSchemaColumnArgs>

A schema_column block as defined below.

dataFactoryId string

The ID of the Data Factory in which to create the Dataset. Changing this forces a new resource to be created.

linkedServiceName string

The name of the Data Factory Linked Service with which to associate the Dataset.

additionalProperties {[key: string]: string}

A map of additional properties to associate with the Data Factory Dataset.

annotations string[]

List of tags that can be used for describing the Data Factory Dataset.

azureBlobFsLocation DatasetDelimitedTextAzureBlobFsLocationArgs

An azure_blob_fs_location block as defined below.

azureBlobStorageLocation DatasetDelimitedTextAzureBlobStorageLocationArgs

An azure_blob_storage_location block as defined below.

columnDelimiter string

The column delimiter. Defaults to ,.

compressionCodec string

The compression codec used to read/write text files. Valid values are None, bzip2, gzip, deflate, ZipDeflate, TarGzip, Tar, snappy and lz4. Please note these values are case sensitive.

compressionLevel string

The compression ratio for the Data Factory Dataset. Valid values are Fastest or Optimal. Please note these values are case sensitive.

description string

The description for the Data Factory Dataset.

encoding string

The encoding format for the file.

escapeCharacter string

The escape character. Defaults to \.

firstRowAsHeader boolean

When used as input, treat the first row of data as headers. When used as output, write the headers into the output as the first row of data. Defaults to false.

folder string

The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.

httpServerLocation DatasetDelimitedTextHttpServerLocationArgs

A http_server_location block as defined below.

name string

Specifies the name of the Data Factory Dataset. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.

nullValue string

The null value string. Defaults to an empty string ("").

parameters {[key: string]: string}

A map of parameters to associate with the Data Factory Dataset.

quoteCharacter string

The quote character. Defaults to ".

rowDelimiter string

The row delimiter. On read, defaults to any of \r\n, \r, or \n; on write, defaults to \n for mapping data flows and to \r\n for the Copy activity.

schemaColumns DatasetDelimitedTextSchemaColumnArgs[]

A schema_column block as defined below.

data_factory_id str

The ID of the Data Factory in which to create the Dataset. Changing this forces a new resource to be created.

linked_service_name str

The name of the Data Factory Linked Service with which to associate the Dataset.

additional_properties Mapping[str, str]

A map of additional properties to associate with the Data Factory Dataset.

annotations Sequence[str]

List of tags that can be used for describing the Data Factory Dataset.

azure_blob_fs_location DatasetDelimitedTextAzureBlobFsLocationArgs

An azure_blob_fs_location block as defined below.

azure_blob_storage_location DatasetDelimitedTextAzureBlobStorageLocationArgs

An azure_blob_storage_location block as defined below.

column_delimiter str

The column delimiter. Defaults to ,.

compression_codec str

The compression codec used to read/write text files. Valid values are None, bzip2, gzip, deflate, ZipDeflate, TarGzip, Tar, snappy and lz4. Please note these values are case sensitive.

compression_level str

The compression ratio for the Data Factory Dataset. Valid values are Fastest or Optimal. Please note these values are case sensitive.

description str

The description for the Data Factory Dataset.

encoding str

The encoding format for the file.

escape_character str

The escape character. Defaults to \.

first_row_as_header bool

When used as input, treat the first row of data as headers. When used as output, write the headers into the output as the first row of data. Defaults to false.

folder str

The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.

http_server_location DatasetDelimitedTextHttpServerLocationArgs

A http_server_location block as defined below.

name str

Specifies the name of the Data Factory Dataset. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.

null_value str

The null value string. Defaults to an empty string ("").

parameters Mapping[str, str]

A map of parameters to associate with the Data Factory Dataset.

quote_character str

The quote character. Defaults to ".

row_delimiter str

The row delimiter. On read, defaults to any of \r\n, \r, or \n; on write, defaults to \n for mapping data flows and to \r\n for the Copy activity.

schema_columns Sequence[DatasetDelimitedTextSchemaColumnArgs]

A schema_column block as defined below.

dataFactoryId String

The ID of the Data Factory in which to create the Dataset. Changing this forces a new resource to be created.

linkedServiceName String

The name of the Data Factory Linked Service with which to associate the Dataset.

additionalProperties Map<String>

A map of additional properties to associate with the Data Factory Dataset.

annotations List<String>

List of tags that can be used for describing the Data Factory Dataset.

azureBlobFsLocation Property Map

An azure_blob_fs_location block as defined below.

azureBlobStorageLocation Property Map

An azure_blob_storage_location block as defined below.

columnDelimiter String

The column delimiter. Defaults to ,.

compressionCodec String

The compression codec used to read/write text files. Valid values are None, bzip2, gzip, deflate, ZipDeflate, TarGzip, Tar, snappy and lz4. Please note these values are case sensitive.

compressionLevel String

The compression ratio for the Data Factory Dataset. Valid values are Fastest or Optimal. Please note these values are case sensitive.

description String

The description for the Data Factory Dataset.

encoding String

The encoding format for the file.

escapeCharacter String

The escape character. Defaults to \.

firstRowAsHeader Boolean

When used as input, treat the first row of data as headers. When used as output, write the headers into the output as the first row of data. Defaults to false.

folder String

The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.

httpServerLocation Property Map

A http_server_location block as defined below.

name String

Specifies the name of the Data Factory Dataset. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.

nullValue String

The null value string. Defaults to an empty string ("").

parameters Map<String>

A map of parameters to associate with the Data Factory Dataset.

quoteCharacter String

The quote character. Defaults to ".

rowDelimiter String

The row delimiter. On read, defaults to any of \r\n, \r, or \n; on write, defaults to \n for mapping data flows and to \r\n for the Copy activity.

schemaColumns List<Property Map>

A schema_column block as defined below.

Outputs

All input properties are implicitly available as output properties. Additionally, the DatasetDelimitedText resource produces the following output properties:

Id string

The provider-assigned unique ID for this managed resource.

Id string

The provider-assigned unique ID for this managed resource.

id String

The provider-assigned unique ID for this managed resource.

id string

The provider-assigned unique ID for this managed resource.

id str

The provider-assigned unique ID for this managed resource.

id String

The provider-assigned unique ID for this managed resource.
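
The id output, like every input property echoed back as an output, can be read from the resource object once it is declared. A minimal TypeScript sketch, assuming the exampleDatasetDelimitedText resource from the example above:

// Export the provider-assigned ID and the dataset name as stack outputs.
export const datasetId = exampleDatasetDelimitedText.id;
export const datasetName = exampleDatasetDelimitedText.name;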

Look up Existing DatasetDelimitedText Resource

Get an existing DatasetDelimitedText resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

public static get(name: string, id: Input<ID>, state?: DatasetDelimitedTextState, opts?: CustomResourceOptions): DatasetDelimitedText
@staticmethod
def get(resource_name: str,
        id: str,
        opts: Optional[ResourceOptions] = None,
        additional_properties: Optional[Mapping[str, str]] = None,
        annotations: Optional[Sequence[str]] = None,
        azure_blob_fs_location: Optional[DatasetDelimitedTextAzureBlobFsLocationArgs] = None,
        azure_blob_storage_location: Optional[DatasetDelimitedTextAzureBlobStorageLocationArgs] = None,
        column_delimiter: Optional[str] = None,
        compression_codec: Optional[str] = None,
        compression_level: Optional[str] = None,
        data_factory_id: Optional[str] = None,
        description: Optional[str] = None,
        encoding: Optional[str] = None,
        escape_character: Optional[str] = None,
        first_row_as_header: Optional[bool] = None,
        folder: Optional[str] = None,
        http_server_location: Optional[DatasetDelimitedTextHttpServerLocationArgs] = None,
        linked_service_name: Optional[str] = None,
        name: Optional[str] = None,
        null_value: Optional[str] = None,
        parameters: Optional[Mapping[str, str]] = None,
        quote_character: Optional[str] = None,
        row_delimiter: Optional[str] = None,
        schema_columns: Optional[Sequence[DatasetDelimitedTextSchemaColumnArgs]] = None) -> DatasetDelimitedText
func GetDatasetDelimitedText(ctx *Context, name string, id IDInput, state *DatasetDelimitedTextState, opts ...ResourceOption) (*DatasetDelimitedText, error)
public static DatasetDelimitedText Get(string name, Input<string> id, DatasetDelimitedTextState? state, CustomResourceOptions? opts = null)
public static DatasetDelimitedText get(String name, Output<String> id, DatasetDelimitedTextState state, CustomResourceOptions options)
Resource lookup is not supported in YAML
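
For example, a minimal TypeScript sketch of looking up an existing dataset by its Azure resource ID (the ID shown is a placeholder in the same format as the Import section below):

import * as azure from "@pulumi/azure";

// Hypothetical resource ID of an existing Delimited Text Dataset.
const existingId = "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/example/providers/Microsoft.DataFactory/factories/example/datasets/example";

const existing = azure.datafactory.DatasetDelimitedText.get("existingDatasetDelimitedText", existingId);

export const existingLinkedService = existing.linkedServiceName;

The parameters accepted by get are described below for each language SDK: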
name
The unique name of the resulting resource.
id
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
resource_name
The unique name of the resulting resource.
id
The unique provider ID of the resource to lookup.
name
The unique name of the resulting resource.
id
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
name
The unique name of the resulting resource.
id
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
name
The unique name of the resulting resource.
id
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
The following state arguments are supported:
AdditionalProperties Dictionary<string, string>

A map of additional properties to associate with the Data Factory Dataset.

Annotations List<string>

List of tags that can be used for describing the Data Factory Dataset.

AzureBlobFsLocation DatasetDelimitedTextAzureBlobFsLocationArgs

An azure_blob_fs_location block as defined below.

AzureBlobStorageLocation DatasetDelimitedTextAzureBlobStorageLocationArgs

An azure_blob_storage_location block as defined below.

ColumnDelimiter string

The column delimiter. Defaults to ,.

CompressionCodec string

The compression codec used to read/write text files. Valid values are None, bzip2, gzip, deflate, ZipDeflate, TarGzip, Tar, snappy and lz4. Please note these values are case sensitive.

CompressionLevel string

The compression ratio for the Data Factory Dataset. Valid values are Fastest or Optimal. Please note these values are case sensitive.

DataFactoryId string

The ID of the Data Factory in which to create the Dataset. Changing this forces a new resource to be created.

Description string

The description for the Data Factory Dataset.

Encoding string

The encoding format for the file.

EscapeCharacter string

The escape character. Defaults to \.

FirstRowAsHeader bool

When used as input, treat the first row of data as headers. When used as output, write the headers into the output as the first row of data. Defaults to false.

Folder string

The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.

HttpServerLocation DatasetDelimitedTextHttpServerLocationArgs

A http_server_location block as defined below.

LinkedServiceName string

The name of the Data Factory Linked Service with which to associate the Dataset.

Name string

Specifies the name of the Data Factory Dataset. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.

NullValue string

The null value string. Defaults to an empty string ("").

Parameters Dictionary<string, string>

A map of parameters to associate with the Data Factory Dataset.

QuoteCharacter string

The quote character. Defaults to ".

RowDelimiter string

The row delimiter. On read, defaults to any of \r\n, \r, or \n; on write, defaults to \n for mapping data flows and to \r\n for the Copy activity.

SchemaColumns List<DatasetDelimitedTextSchemaColumnArgs>

A schema_column block as defined below.

AdditionalProperties map[string]string

A map of additional properties to associate with the Data Factory Dataset.

Annotations []string

List of tags that can be used for describing the Data Factory Dataset.

AzureBlobFsLocation DatasetDelimitedTextAzureBlobFsLocationArgs

An azure_blob_fs_location block as defined below.

AzureBlobStorageLocation DatasetDelimitedTextAzureBlobStorageLocationArgs

An azure_blob_storage_location block as defined below.

ColumnDelimiter string

The column delimiter. Defaults to ,.

CompressionCodec string

The compression codec used to read/write text files. Valid values are None, bzip2, gzip, deflate, ZipDeflate, TarGzip, Tar, snappy and lz4. Please note these values are case sensitive.

CompressionLevel string

The compression ratio for the Data Factory Dataset. Valid values are Fastest or Optimal. Please note these values are case sensitive.

DataFactoryId string

The ID of the Data Factory in which to create the Dataset. Changing this forces a new resource to be created.

Description string

The description for the Data Factory Dataset.

Encoding string

The encoding format for the file.

EscapeCharacter string

The escape character. Defaults to \.

FirstRowAsHeader bool

When used as input, treat the first row of data as headers. When used as output, write the headers into the output as the first row of data. Defaults to false.

Folder string

The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.

HttpServerLocation DatasetDelimitedTextHttpServerLocationArgs

A http_server_location block as defined below.

LinkedServiceName string

The name of the Data Factory Linked Service with which to associate the Dataset.

Name string

Specifies the name of the Data Factory Dataset. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.

NullValue string

The null value string. Defaults to an empty string ("").

Parameters map[string]string

A map of parameters to associate with the Data Factory Dataset.

QuoteCharacter string

The quote character. Defaults to ".

RowDelimiter string

The row delimiter. On read, defaults to any of \r\n, \r, or \n; on write, defaults to \n for mapping data flows and to \r\n for the Copy activity.

SchemaColumns []DatasetDelimitedTextSchemaColumnArgs

A schema_column block as defined below.

additionalProperties Map<String,String>

A map of additional properties to associate with the Data Factory Dataset.

annotations List<String>

List of tags that can be used for describing the Data Factory Dataset.

azureBlobFsLocation DatasetDelimitedTextAzureBlobFsLocationArgs

An azure_blob_fs_location block as defined below.

azureBlobStorageLocation DatasetDelimitedTextAzureBlobStorageLocationArgs

An azure_blob_storage_location block as defined below.

columnDelimiter String

The column delimiter. Defaults to ,.

compressionCodec String

The compression codec used to read/write text files. Valid values are None, bzip2, gzip, deflate, ZipDeflate, TarGzip, Tar, snappy and lz4. Please note these values are case sensitive.

compressionLevel String

The compression ratio for the Data Factory Dataset. Valid values are Fastest or Optimal. Please note these values are case sensitive.

dataFactoryId String

The ID of the Data Factory in which to create the Dataset. Changing this forces a new resource to be created.

description String

The description for the Data Factory Dataset.

encoding String

The encoding format for the file.

escapeCharacter String

The escape character. Defaults to \.

firstRowAsHeader Boolean

When used as input, treat the first row of data as headers. When used as output, write the headers into the output as the first row of data. Defaults to false.

folder String

The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.

httpServerLocation DatasetDelimitedTextHttpServerLocationArgs

A http_server_location block as defined below.

linkedServiceName String

The name of the Data Factory Linked Service with which to associate the Dataset.

name String

Specifies the name of the Data Factory Dataset. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.

nullValue String

The null value string. Defaults to an empty string ("").

parameters Map<String,String>

A map of parameters to associate with the Data Factory Dataset.

quoteCharacter String

The quote character. Defaults to ".

rowDelimiter String

The row delimiter. On read, defaults to any of \r\n, \r, or \n; on write, defaults to \n for mapping data flows and to \r\n for the Copy activity.

schemaColumns List<DatasetDelimitedTextSchemaColumnArgs>

A schema_column block as defined below.

additionalProperties {[key: string]: string}

A map of additional properties to associate with the Data Factory Dataset.

annotations string[]

List of tags that can be used for describing the Data Factory Dataset.

azureBlobFsLocation DatasetDelimitedTextAzureBlobFsLocationArgs

An azure_blob_fs_location block as defined below.

azureBlobStorageLocation DatasetDelimitedTextAzureBlobStorageLocationArgs

An azure_blob_storage_location block as defined below.

columnDelimiter string

The column delimiter. Defaults to ,.

compressionCodec string

The compression codec used to read/write text files. Valid values are None, bzip2, gzip, deflate, ZipDeflate, TarGzip, Tar, snappy and lz4. Please note these values are case sensitive.

compressionLevel string

The compression ratio for the Data Factory Dataset. Valid values are Fastest or Optimal. Please note these values are case sensitive.

dataFactoryId string

The ID of the Data Factory in which to create the Dataset. Changing this forces a new resource to be created.

description string

The description for the Data Factory Dataset.

encoding string

The encoding format for the file.

escapeCharacter string

The escape character. Defaults to \.

firstRowAsHeader boolean

When used as input, treat the first row of data as headers. When used as output, write the headers into the output as the first row of data. Defaults to false.

folder string

The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.

httpServerLocation DatasetDelimitedTextHttpServerLocationArgs

A http_server_location block as defined below.

linkedServiceName string

The name of the Data Factory Linked Service with which to associate the Dataset.

name string

Specifies the name of the Data Factory Dataset. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.

nullValue string

The null value string. Defaults to an empty string ("").

parameters {[key: string]: string}

A map of parameters to associate with the Data Factory Dataset.

quoteCharacter string

The quote character. Defaults to ".

rowDelimiter string

The row delimiter. On read, defaults to any of \r\n, \r, or \n; on write, defaults to \n for mapping data flows and to \r\n for the Copy activity.

schemaColumns DatasetDelimitedTextSchemaColumnArgs[]

A schema_column block as defined below.

additional_properties Mapping[str, str]

A map of additional properties to associate with the Data Factory Dataset.

annotations Sequence[str]

List of tags that can be used for describing the Data Factory Dataset.

azure_blob_fs_location DatasetDelimitedTextAzureBlobFsLocationArgs

An azure_blob_fs_location block as defined below.

azure_blob_storage_location DatasetDelimitedTextAzureBlobStorageLocationArgs

An azure_blob_storage_location block as defined below.

column_delimiter str

The column delimiter. Defaults to ,.

compression_codec str

The compression codec used to read/write text files. Valid values are None, bzip2, gzip, deflate, ZipDeflate, TarGzip, Tar, snappy and lz4. Please note these values are case sensitive.

compression_level str

The compression ratio for the Data Factory Dataset. Valid values are Fastest or Optimal. Please note these values are case sensitive.

data_factory_id str

The ID of the Data Factory in which to create the Dataset. Changing this forces a new resource to be created.

description str

The description for the Data Factory Dataset.

encoding str

The encoding format for the file.

escape_character str

The escape character. Defaults to \.

first_row_as_header bool

When used as input, treat the first row of data as headers. When used as output, write the headers into the output as the first row of data. Defaults to false.

folder str

The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.

http_server_location DatasetDelimitedTextHttpServerLocationArgs

A http_server_location block as defined below.

linked_service_name str

The name of the Data Factory Linked Service with which to associate the Dataset.

name str

Specifies the name of the Data Factory Dataset. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.

null_value str

The null value string. Defaults to an empty string ("").

parameters Mapping[str, str]

A map of parameters to associate with the Data Factory Dataset.

quote_character str

The quote character. Defaults to ".

row_delimiter str

The row delimiter. On read, defaults to any of \r\n, \r, or \n; on write, defaults to \n for mapping data flows and to \r\n for the Copy activity.

schema_columns Sequence[DatasetDelimitedTextSchemaColumnArgs]

A schema_column block as defined below.

additionalProperties Map<String>

A map of additional properties to associate with the Data Factory Dataset.

annotations List<String>

List of tags that can be used for describing the Data Factory Dataset.

azureBlobFsLocation Property Map

An azure_blob_fs_location block as defined below.

azureBlobStorageLocation Property Map

An azure_blob_storage_location block as defined below.

columnDelimiter String

The column delimiter. Defaults to ,.

compressionCodec String

The compression codec used to read/write text files. Valid values are None, bzip2, gzip, deflate, ZipDeflate, TarGzip, Tar, snappy and lz4. Please note these values are case sensitive.

compressionLevel String

The compression ratio for the Data Factory Dataset. Valid values are Fastest or Optimal. Please note these values are case sensitive.

dataFactoryId String

The ID of the Data Factory in which to create the Dataset. Changing this forces a new resource to be created.

description String

The description for the Data Factory Dataset.

encoding String

The encoding format for the file.

escapeCharacter String

The escape character. Defaults to \.

firstRowAsHeader Boolean

When used as input, treat the first row of data as headers. When used as output, write the headers into the output as the first row of data. Defaults to false.

folder String

The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.

httpServerLocation Property Map

A http_server_location block as defined below.

linkedServiceName String

The name of the Data Factory Linked Service with which to associate the Dataset.

name String

Specifies the name of the Data Factory Dataset. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.

nullValue String

The null value string. Defaults to an empty string ("").

parameters Map<String>

A map of parameters to associate with the Data Factory Dataset.

quoteCharacter String

The quote character. Defaults to ".

rowDelimiter String

The row delimiter. On read, defaults to any of \r\n, \r, or \n; on write, defaults to \n for mapping data flows and to \r\n for the Copy activity.

schemaColumns List<Property Map>

A schema_column block as defined below.

Supporting Types

DatasetDelimitedTextAzureBlobFsLocation

FileSystem string

The Data Lake Storage Gen2 file system on the Azure Blob Storage Account hosting the file.

Filename string

The filename of the file.

Path string

The folder path to the file.

FileSystem string

The Data Lake Storage Gen2 file system on the Azure Blob Storage Account hosting the file.

Filename string

The filename of the file.

Path string

The folder path to the file.

fileSystem String

The Data Lake Storage Gen2 file system on the Azure Blob Storage Account hosting the file.

filename String

The filename of the file.

path String

The folder path to the file.

fileSystem string

The Data Lake Storage Gen2 file system on the Azure Blob Storage Account hosting the file.

filename string

The filename of the file.

path string

The folder path to the file.

file_system str

The Data Lake Storage Gen2 file system on the Azure Blob Storage Account hosting the file.

filename str

The filename of the file.

path str

The folder path to the file.

fileSystem String

The Data Lake Storage Gen2 file system on the Azure Blob Storage Account hosting the file.

filename String

The filename of the file.

path String

The folder path to the file.
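
A minimal TypeScript sketch of a dataset that reads a file from a Data Lake Storage Gen2 file system via an azure_blob_fs_location block; the linked service name, file system, path and filename are placeholders, and a Data Lake Storage Gen2 linked service is assumed to already exist in the factory:

// Sketch only: reuses exampleFactory from the example above.
const adls2Dataset = new azure.datafactory.DatasetDelimitedText("adls2Dataset", {
    dataFactoryId: exampleFactory.id,
    linkedServiceName: "example-adls2-link",    // placeholder linked service name
    azureBlobFsLocation: {
        fileSystem: "raw",
        path: "input/csv",
        filename: "data.csv",
    },
    columnDelimiter: ",",
    firstRowAsHeader: true,
});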

DatasetDelimitedTextAzureBlobStorageLocation

Container string

The container on the Azure Blob Storage Account hosting the file.

DynamicContainerEnabled bool

Is the container using dynamic expression, function or system variables? Defaults to false.

DynamicFilenameEnabled bool

Is the filename using dynamic expression, function or system variables? Defaults to false.

DynamicPathEnabled bool

Is the path using dynamic expression, function or system variables? Defaults to false.

Filename string

The filename of the file.

Path string

The folder path to the file. This can be an empty string.

Container string

The container on the Azure Blob Storage Account hosting the file.

DynamicContainerEnabled bool

Is the container using dynamic expression, function or system variables? Defaults to false.

DynamicFilenameEnabled bool

Is the filename using dynamic expression, function or system variables? Defaults to false.

DynamicPathEnabled bool

Is the path using dynamic expression, function or system variables? Defaults to false.

Filename string

The filename of the file.

Path string

The folder path to the file. This can be an empty string.

container String

The container on the Azure Blob Storage Account hosting the file.

dynamicContainerEnabled Boolean

Is the container using dynamic expression, function or system variables? Defaults to false.

dynamicFilenameEnabled Boolean

Is the filename using dynamic expression, function or system variables? Defaults to false.

dynamicPathEnabled Boolean

Is the path using dynamic expression, function or system variables? Defaults to false.

filename String

The filename of the file.

path String

The folder path to the file. This can be an empty string.

container string

The container on the Azure Blob Storage Account hosting the file.

dynamicContainerEnabled boolean

Is the container using dynamic expression, function or system variables? Defaults to false.

dynamicFilenameEnabled boolean

Is the filename using dynamic expression, function or system variables? Defaults to false.

dynamicPathEnabled boolean

Is the path using dynamic expression, function or system variables? Defaults to false.

filename string

The filename of the file.

path string

The folder path to the file. This can be an empty string.

container str

The container on the Azure Blob Storage Account hosting the file.

dynamic_container_enabled bool

Is the container using dynamic expression, function or system variables? Defaults to false.

dynamic_filename_enabled bool

Is the filename using dynamic expression, function or system variables? Defaults to false.

dynamic_path_enabled bool

Is the path using dynamic expression, function or system variables? Defaults to false.

filename str

The filename of the file.

path str

The folder path to the file. This can be an empty string.

container String

The container on the Azure Blob Storage Account hosting the file.

dynamicContainerEnabled Boolean

Is the container using dynamic expression, function or system variables? Defaults to false.

dynamicFilenameEnabled Boolean

Is the filename using dynamic expression, function or system variables? Defaults to false.

dynamicPathEnabled Boolean

Is the path using dynamic expression, function or system variables? Defaults to false.

filename String

The filename of the file.

path String

The folder path to the file. This can be an empty string.
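
A minimal TypeScript sketch of an azure_blob_storage_location block whose path is resolved at runtime from a dataset parameter; the container, linked service and parameter names are placeholders:

// Sketch only: reuses exampleFactory from the example above and assumes an
// existing Blob Storage linked service named "example-blob-link".
const blobDataset = new azure.datafactory.DatasetDelimitedText("blobDataset", {
    dataFactoryId: exampleFactory.id,
    linkedServiceName: "example-blob-link",
    parameters: {
        folder: "landing",
    },
    azureBlobStorageLocation: {
        container: "csv",
        path: "@dataset().folder",      // Data Factory expression referencing the parameter above
        dynamicPathEnabled: true,
        filename: "data.csv",
    },
    columnDelimiter: ",",
    firstRowAsHeader: true,
});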

DatasetDelimitedTextHttpServerLocation

Filename string

The filename of the file on the web server.

Path string

The folder path to the file on the web server.

RelativeUrl string

The base URL to the web server hosting the file.

DynamicFilenameEnabled bool

Is the filename using dynamic expression, function or system variables? Defaults to false.

DynamicPathEnabled bool

Is the path using dynamic expression, function or system variables? Defaults to false.

Filename string

The filename of the file on the web server.

Path string

The folder path to the file on the web server.

RelativeUrl string

The base URL to the web server hosting the file.

DynamicFilenameEnabled bool

Is the filename using dynamic expression, function or system variables? Defaults to false.

DynamicPathEnabled bool

Is the path using dynamic expression, function or system variables? Defaults to false.

filename String

The filename of the file on the web server.

path String

The folder path to the file on the web server.

relativeUrl String

The base URL to the web server hosting the file.

dynamicFilenameEnabled Boolean

Is the filename using dynamic expression, function or system variables? Defaults to false.

dynamicPathEnabled Boolean

Is the path using dynamic expression, function or system variables? Defaults to false.

filename string

The filename of the file on the web server.

path string

The folder path to the file on the web server.

relativeUrl string

The base URL to the web server hosting the file.

dynamicFilenameEnabled boolean

Is the filename using dynamic expression, function or system variables? Defaults to false.

dynamicPathEnabled boolean

Is the path using dynamic expression, function or system variables? Defaults to false.

filename str

The filename of the file on the web server.

path str

The folder path to the file on the web server.

relative_url str

The base URL to the web server hosting the file.

dynamic_filename_enabled bool

Is the filename using dynamic expression, function or system variables? Defaults to false.

dynamic_path_enabled bool

Is the path using dynamic expression, function or system variables? Defaults to false.

filename String

The filename of the file on the web server.

path String

The folder path to the file on the web server.

relativeUrl String

The base URL to the web server hosting the file.

dynamicFilenameEnabled Boolean

Is the filename using dynamic expression, function or system variables? Defaults to false.

dynamicPathEnabled Boolean

Is the path using dynamic expression, function or system variables? Defaults to false.
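
A minimal TypeScript sketch of an http_server_location block with a dynamic filename driven by a dataset parameter; the parameter name and its default value are placeholders:

// Sketch only: reuses exampleFactory and exampleLinkedServiceWeb from the example above.
const dynamicHttpDataset = new azure.datafactory.DatasetDelimitedText("dynamicHttpDataset", {
    dataFactoryId: exampleFactory.id,
    linkedServiceName: exampleLinkedServiceWeb.name,
    parameters: {
        file: "fizz.txt",
    },
    httpServerLocation: {
        relativeUrl: "http://www.bing.com",
        path: "foo/bar/",
        filename: "@dataset().file",    // Data Factory expression referencing the parameter above
        dynamicFilenameEnabled: true,
    },
});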

DatasetDelimitedTextSchemaColumn

Name string

The name of the column.

Description string

The description of the column.

Type string

Type of the column. Valid values are Byte, Byte[], Boolean, Date, DateTime, DateTimeOffset, Decimal, Double, Guid, Int16, Int32, Int64, Single, String and TimeSpan. Please note these values are case sensitive.

Name string

The name of the column.

Description string

The description of the column.

Type string

Type of the column. Valid values are Byte, Byte[], Boolean, Date, DateTime, DateTimeOffset, Decimal, Double, Guid, Int16, Int32, Int64, Single, String and TimeSpan. Please note these values are case sensitive.

name String

The name of the column.

description String

The description of the column.

type String

Type of the column. Valid values are Byte, Byte[], Boolean, Date, DateTime, DateTimeOffset, Decimal, Double, Guid, Int16, Int32, Int64, Single, String and TimeSpan. Please note these values are case sensitive.

name string

The name of the column.

description string

The description of the column.

type string

Type of the column. Valid values are Byte, Byte[], Boolean, Date, DateTime, DateTimeOffset, Decimal, Double, Guid, Int16, Int32, Int64, Single, String and TimeSpan. Please note these values are case sensitive.

name str

The name of the column.

description str

The description of the column.

type str

Type of the column. Valid values are Byte, Byte[], Boolean, Date, DateTime, DateTimeOffset, Decimal, Double, Guid, Int16, Int32, Int64, Single, String and TimeSpan. Please note these values are case sensitive.

name String

The name of the column.

description String

The description of the column.

type String

Type of the column. Valid values are Byte, Byte[], Boolean, Date, DateTime, DateTimeOffset, Decimal, Double, Guid, Int16, Int32, Int64, Single, String and TimeSpan. Please note these values are case sensitive.
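
A minimal TypeScript sketch of schema_column blocks describing two typed columns (the column names, types and descriptions are placeholders):

// Sketch only: reuses exampleFactory and exampleLinkedServiceWeb from the example above.
const typedDataset = new azure.datafactory.DatasetDelimitedText("typedDataset", {
    dataFactoryId: exampleFactory.id,
    linkedServiceName: exampleLinkedServiceWeb.name,
    httpServerLocation: {
        relativeUrl: "http://www.bing.com",
        path: "foo/bar/",
        filename: "fizz.txt",
    },
    schemaColumns: [
        { name: "id", type: "Int32", description: "Primary key" },
        { name: "created_at", type: "DateTime" },
    ],
});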

Import

Data Factory Datasets can be imported using the resource ID, e.g.

 $ pulumi import azure:datafactory/datasetDelimitedText:DatasetDelimitedText example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/example/providers/Microsoft.DataFactory/factories/example/datasets/example

Package Details

Repository
Azure Classic pulumi/pulumi-azure
License
Apache-2.0
Notes

This Pulumi package is based on the azurerm Terraform Provider.