We recommend using the Azure Native provider.
azure.datafactory.DatasetDelimitedText
Manages an Azure Delimited Text Dataset inside an Azure Data Factory.
Example Usage
C#
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Azure = Pulumi.Azure;
return await Deployment.RunAsync(() =>
{
var exampleResourceGroup = new Azure.Core.ResourceGroup("exampleResourceGroup", new()
{
Location = "West Europe",
});
var exampleFactory = new Azure.DataFactory.Factory("exampleFactory", new()
{
Location = exampleResourceGroup.Location,
ResourceGroupName = exampleResourceGroup.Name,
});
var exampleLinkedServiceWeb = new Azure.DataFactory.LinkedServiceWeb("exampleLinkedServiceWeb", new()
{
DataFactoryId = exampleFactory.Id,
AuthenticationType = "Anonymous",
Url = "https://www.bing.com",
});
var exampleDatasetDelimitedText = new Azure.DataFactory.DatasetDelimitedText("exampleDatasetDelimitedText", new()
{
DataFactoryId = exampleFactory.Id,
LinkedServiceName = exampleLinkedServiceWeb.Name,
HttpServerLocation = new Azure.DataFactory.Inputs.DatasetDelimitedTextHttpServerLocationArgs
{
RelativeUrl = "http://www.bing.com",
Path = "foo/bar/",
Filename = "fizz.txt",
},
ColumnDelimiter = ",",
RowDelimiter = "NEW",
Encoding = "UTF-8",
QuoteCharacter = "x",
EscapeCharacter = "f",
FirstRowAsHeader = true,
NullValue = "NULL",
});
});
Go
package main
import (
"github.com/pulumi/pulumi-azure/sdk/v5/go/azure/core"
"github.com/pulumi/pulumi-azure/sdk/v5/go/azure/datafactory"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
exampleResourceGroup, err := core.NewResourceGroup(ctx, "exampleResourceGroup", &core.ResourceGroupArgs{
Location: pulumi.String("West Europe"),
})
if err != nil {
return err
}
exampleFactory, err := datafactory.NewFactory(ctx, "exampleFactory", &datafactory.FactoryArgs{
Location: exampleResourceGroup.Location,
ResourceGroupName: exampleResourceGroup.Name,
})
if err != nil {
return err
}
exampleLinkedServiceWeb, err := datafactory.NewLinkedServiceWeb(ctx, "exampleLinkedServiceWeb", &datafactory.LinkedServiceWebArgs{
DataFactoryId: exampleFactory.ID(),
AuthenticationType: pulumi.String("Anonymous"),
Url: pulumi.String("https://www.bing.com"),
})
if err != nil {
return err
}
_, err = datafactory.NewDatasetDelimitedText(ctx, "exampleDatasetDelimitedText", &datafactory.DatasetDelimitedTextArgs{
DataFactoryId: exampleFactory.ID(),
LinkedServiceName: exampleLinkedServiceWeb.Name,
HttpServerLocation: &datafactory.DatasetDelimitedTextHttpServerLocationArgs{
RelativeUrl: pulumi.String("http://www.bing.com"),
Path: pulumi.String("foo/bar/"),
Filename: pulumi.String("fizz.txt"),
},
ColumnDelimiter: pulumi.String(","),
RowDelimiter: pulumi.String("NEW"),
Encoding: pulumi.String("UTF-8"),
QuoteCharacter: pulumi.String("x"),
EscapeCharacter: pulumi.String("f"),
FirstRowAsHeader: pulumi.Bool(true),
NullValue: pulumi.String("NULL"),
})
if err != nil {
return err
}
return nil
})
}
Java
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.azure.core.ResourceGroup;
import com.pulumi.azure.core.ResourceGroupArgs;
import com.pulumi.azure.datafactory.Factory;
import com.pulumi.azure.datafactory.FactoryArgs;
import com.pulumi.azure.datafactory.LinkedServiceWeb;
import com.pulumi.azure.datafactory.LinkedServiceWebArgs;
import com.pulumi.azure.datafactory.DatasetDelimitedText;
import com.pulumi.azure.datafactory.DatasetDelimitedTextArgs;
import com.pulumi.azure.datafactory.inputs.DatasetDelimitedTextHttpServerLocationArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var exampleResourceGroup = new ResourceGroup("exampleResourceGroup", ResourceGroupArgs.builder()
.location("West Europe")
.build());
var exampleFactory = new Factory("exampleFactory", FactoryArgs.builder()
.location(exampleResourceGroup.location())
.resourceGroupName(exampleResourceGroup.name())
.build());
var exampleLinkedServiceWeb = new LinkedServiceWeb("exampleLinkedServiceWeb", LinkedServiceWebArgs.builder()
.dataFactoryId(exampleFactory.id())
.authenticationType("Anonymous")
.url("https://www.bing.com")
.build());
var exampleDatasetDelimitedText = new DatasetDelimitedText("exampleDatasetDelimitedText", DatasetDelimitedTextArgs.builder()
.dataFactoryId(exampleFactory.id())
.linkedServiceName(exampleLinkedServiceWeb.name())
.httpServerLocation(DatasetDelimitedTextHttpServerLocationArgs.builder()
.relativeUrl("http://www.bing.com")
.path("foo/bar/")
.filename("fizz.txt")
.build())
.columnDelimiter(",")
.rowDelimiter("NEW")
.encoding("UTF-8")
.quoteCharacter("x")
.escapeCharacter("f")
.firstRowAsHeader(true)
.nullValue("NULL")
.build());
}
}
Python
import pulumi
import pulumi_azure as azure
example_resource_group = azure.core.ResourceGroup("exampleResourceGroup", location="West Europe")
example_factory = azure.datafactory.Factory("exampleFactory",
location=example_resource_group.location,
resource_group_name=example_resource_group.name)
example_linked_service_web = azure.datafactory.LinkedServiceWeb("exampleLinkedServiceWeb",
data_factory_id=example_factory.id,
authentication_type="Anonymous",
url="https://www.bing.com")
example_dataset_delimited_text = azure.datafactory.DatasetDelimitedText("exampleDatasetDelimitedText",
data_factory_id=example_factory.id,
linked_service_name=example_linked_service_web.name,
http_server_location=azure.datafactory.DatasetDelimitedTextHttpServerLocationArgs(
relative_url="http://www.bing.com",
path="foo/bar/",
filename="fizz.txt",
),
column_delimiter=",",
row_delimiter="NEW",
encoding="UTF-8",
quote_character="x",
escape_character="f",
first_row_as_header=True,
null_value="NULL")
TypeScript
import * as pulumi from "@pulumi/pulumi";
import * as azure from "@pulumi/azure";
const exampleResourceGroup = new azure.core.ResourceGroup("exampleResourceGroup", {location: "West Europe"});
const exampleFactory = new azure.datafactory.Factory("exampleFactory", {
location: exampleResourceGroup.location,
resourceGroupName: exampleResourceGroup.name,
});
const exampleLinkedServiceWeb = new azure.datafactory.LinkedServiceWeb("exampleLinkedServiceWeb", {
dataFactoryId: exampleFactory.id,
authenticationType: "Anonymous",
url: "https://www.bing.com",
});
const exampleDatasetDelimitedText = new azure.datafactory.DatasetDelimitedText("exampleDatasetDelimitedText", {
dataFactoryId: exampleFactory.id,
linkedServiceName: exampleLinkedServiceWeb.name,
httpServerLocation: {
relativeUrl: "http://www.bing.com",
path: "foo/bar/",
filename: "fizz.txt",
},
columnDelimiter: ",",
rowDelimiter: "NEW",
encoding: "UTF-8",
quoteCharacter: "x",
escapeCharacter: "f",
firstRowAsHeader: true,
nullValue: "NULL",
});
YAML
resources:
exampleResourceGroup:
type: azure:core:ResourceGroup
properties:
location: West Europe
exampleFactory:
type: azure:datafactory:Factory
properties:
location: ${exampleResourceGroup.location}
resourceGroupName: ${exampleResourceGroup.name}
exampleLinkedServiceWeb:
type: azure:datafactory:LinkedServiceWeb
properties:
dataFactoryId: ${exampleFactory.id}
authenticationType: Anonymous
url: https://www.bing.com
exampleDatasetDelimitedText:
type: azure:datafactory:DatasetDelimitedText
properties:
dataFactoryId: ${exampleFactory.id}
linkedServiceName: ${exampleLinkedServiceWeb.name}
httpServerLocation:
relativeUrl: http://www.bing.com
path: foo/bar/
filename: fizz.txt
columnDelimiter: ','
rowDelimiter: NEW
encoding: UTF-8
quoteCharacter: x
escapeCharacter: f
firstRowAsHeader: true
nullValue: NULL
Create DatasetDelimitedText Resource
new DatasetDelimitedText(name: string, args: DatasetDelimitedTextArgs, opts?: CustomResourceOptions);
@overload
def DatasetDelimitedText(resource_name: str,
opts: Optional[ResourceOptions] = None,
additional_properties: Optional[Mapping[str, str]] = None,
annotations: Optional[Sequence[str]] = None,
azure_blob_fs_location: Optional[DatasetDelimitedTextAzureBlobFsLocationArgs] = None,
azure_blob_storage_location: Optional[DatasetDelimitedTextAzureBlobStorageLocationArgs] = None,
column_delimiter: Optional[str] = None,
compression_codec: Optional[str] = None,
compression_level: Optional[str] = None,
data_factory_id: Optional[str] = None,
description: Optional[str] = None,
encoding: Optional[str] = None,
escape_character: Optional[str] = None,
first_row_as_header: Optional[bool] = None,
folder: Optional[str] = None,
http_server_location: Optional[DatasetDelimitedTextHttpServerLocationArgs] = None,
linked_service_name: Optional[str] = None,
name: Optional[str] = None,
null_value: Optional[str] = None,
parameters: Optional[Mapping[str, str]] = None,
quote_character: Optional[str] = None,
row_delimiter: Optional[str] = None,
schema_columns: Optional[Sequence[DatasetDelimitedTextSchemaColumnArgs]] = None)
@overload
def DatasetDelimitedText(resource_name: str,
args: DatasetDelimitedTextArgs,
opts: Optional[ResourceOptions] = None)
func NewDatasetDelimitedText(ctx *Context, name string, args DatasetDelimitedTextArgs, opts ...ResourceOption) (*DatasetDelimitedText, error)
public DatasetDelimitedText(string name, DatasetDelimitedTextArgs args, CustomResourceOptions? opts = null)
public DatasetDelimitedText(String name, DatasetDelimitedTextArgs args)
public DatasetDelimitedText(String name, DatasetDelimitedTextArgs args, CustomResourceOptions options)
type: azure:datafactory:DatasetDelimitedText
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args DatasetDelimitedTextArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- resource_name str
- The unique name of the resource.
- args DatasetDelimitedTextArgs
- The arguments to resource properties.
- opts ResourceOptions
- Bag of options to control resource's behavior.
- ctx Context
- Context object for the current deployment.
- name string
- The unique name of the resource.
- args DatasetDelimitedTextArgs
- The arguments to resource properties.
- opts ResourceOption
- Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args DatasetDelimitedTextArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- name String
- The unique name of the resource.
- args DatasetDelimitedTextArgs
- The arguments to resource properties.
- options CustomResourceOptions
- Bag of options to control resource's behavior.
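For example, resource options can be passed as the final constructor argument. A minimal TypeScript sketch (it reuses exampleFactory and exampleLinkedServiceWeb from the example program above and adds an explicit dependency):

import * as azure from "@pulumi/azure";

// Sketch: pass CustomResourceOptions (here an explicit dependsOn) as the third argument.
// exampleFactory and exampleLinkedServiceWeb are the resources defined in the example above.
const dataset = new azure.datafactory.DatasetDelimitedText("exampleDatasetDelimitedText", {
    dataFactoryId: exampleFactory.id,
    linkedServiceName: exampleLinkedServiceWeb.name,
    httpServerLocation: {
        relativeUrl: "http://www.bing.com",
        path: "foo/bar/",
        filename: "fizz.txt",
    },
}, { dependsOn: [exampleLinkedServiceWeb] });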
DatasetDelimitedText Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
The DatasetDelimitedText resource accepts the following input properties:
(Property names are shown in their C# form; the Go, Java, TypeScript, Python and YAML SDKs expose the same properties using their own casing and collection types.)

- DataFactoryId string (Required): The ID of the Data Factory in which to create the Dataset. Changing this forces a new resource to be created.
- LinkedServiceName string (Required): The name of the Data Factory Linked Service to associate the Dataset with.
- AdditionalProperties Dictionary<string, string>: A map of additional properties to associate with the Data Factory Dataset.
- Annotations List<string>: List of tags that can be used for describing the Data Factory Dataset.
- Description string: The description for the Data Factory Dataset.
- Folder string: The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.
- Name string: Specifies the name of the Data Factory Dataset. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.
- Parameters Dictionary<string, string>: A map of parameters to associate with the Data Factory Dataset.

Exactly one of the following location blocks must be set for a Delimited Text Dataset:

- AzureBlobFsLocation DatasetDelimitedTextAzureBlobFsLocation: An azure_blob_fs_location block as defined below.
- AzureBlobStorageLocation DatasetDelimitedTextAzureBlobStorageLocation: An azure_blob_storage_location block as defined below.
- HttpServerLocation DatasetDelimitedTextHttpServerLocation: A http_server_location block as defined below.

The following arguments are specific to a Delimited Text Dataset:

- ColumnDelimiter string: The column delimiter. Defaults to ",".
- CompressionCodec string: The compression codec used to read/write text files. Valid values are None, bzip2, gzip, deflate, ZipDeflate, TarGzip, Tar, snappy and lz4. Please note these values are case sensitive.
- CompressionLevel string: The compression ratio for the Data Factory Dataset. Valid values are Fastest or Optimal. Please note these values are case sensitive.
- Encoding string: The encoding format for the file.
- EscapeCharacter string: The escape character. Defaults to \.
- FirstRowAsHeader bool: When used as input, treat the first row of data as headers. When used as output, write the headers into the output as the first row of data. Defaults to false.
- NullValue string: The null value string. Defaults to an empty string ("").
- QuoteCharacter string: The quote character. Defaults to ".
- RowDelimiter string: The row delimiter. On read, defaults to any of \r\n, \r and \n; on write, defaults to \n for the mapping data flow and \r\n for the Copy activity.
- SchemaColumns List<DatasetDelimitedTextSchemaColumn>: One or more schema_column blocks as defined below.
Outputs
All input properties are implicitly available as output properties. Additionally, the DatasetDelimitedText resource produces the following output properties:
- Id string: The provider-assigned unique ID for this managed resource.
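For instance, continuing the TypeScript example program above, the provider-assigned ID can be exported as a stack output:

// Export the provider-assigned ID of the dataset defined in the example above.
export const datasetId = exampleDatasetDelimitedText.id;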
Look up Existing DatasetDelimitedText Resource
Get an existing DatasetDelimitedText resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: DatasetDelimitedTextState, opts?: CustomResourceOptions): DatasetDelimitedText
@staticmethod
def get(resource_name: str,
id: str,
opts: Optional[ResourceOptions] = None,
additional_properties: Optional[Mapping[str, str]] = None,
annotations: Optional[Sequence[str]] = None,
azure_blob_fs_location: Optional[DatasetDelimitedTextAzureBlobFsLocationArgs] = None,
azure_blob_storage_location: Optional[DatasetDelimitedTextAzureBlobStorageLocationArgs] = None,
column_delimiter: Optional[str] = None,
compression_codec: Optional[str] = None,
compression_level: Optional[str] = None,
data_factory_id: Optional[str] = None,
description: Optional[str] = None,
encoding: Optional[str] = None,
escape_character: Optional[str] = None,
first_row_as_header: Optional[bool] = None,
folder: Optional[str] = None,
http_server_location: Optional[DatasetDelimitedTextHttpServerLocationArgs] = None,
linked_service_name: Optional[str] = None,
name: Optional[str] = None,
null_value: Optional[str] = None,
parameters: Optional[Mapping[str, str]] = None,
quote_character: Optional[str] = None,
row_delimiter: Optional[str] = None,
schema_columns: Optional[Sequence[DatasetDelimitedTextSchemaColumnArgs]] = None) -> DatasetDelimitedText
func GetDatasetDelimitedText(ctx *Context, name string, id IDInput, state *DatasetDelimitedTextState, opts ...ResourceOption) (*DatasetDelimitedText, error)
public static DatasetDelimitedText Get(string name, Input<string> id, DatasetDelimitedTextState? state, CustomResourceOptions? opts = null)
public static DatasetDelimitedText get(String name, Output<String> id, DatasetDelimitedTextState state, CustomResourceOptions options)
Resource lookup is not supported in YAML
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- resource_name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
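As a sketch of the lookup API in TypeScript (the resource ID below is a placeholder; substitute the ID of the real Data Factory dataset):

import * as azure from "@pulumi/azure";

// Look up an existing dataset by its Azure resource ID (placeholder shown).
const existing = azure.datafactory.DatasetDelimitedText.get(
    "existingDataset",
    "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/example/providers/Microsoft.DataFactory/factories/example/datasets/example",
);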
The following state arguments are supported (shown in their C# form, as above):

- AdditionalProperties Dictionary<string, string>: A map of additional properties to associate with the Data Factory Dataset.
- Annotations List<string>: List of tags that can be used for describing the Data Factory Dataset.
- DataFactoryId string: The ID of the Data Factory in which to create the Dataset. Changing this forces a new resource to be created.
- Description string: The description for the Data Factory Dataset.
- Folder string: The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.
- LinkedServiceName string: The name of the Data Factory Linked Service to associate the Dataset with.
- Name string: Specifies the name of the Data Factory Dataset. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.
- Parameters Dictionary<string, string>: A map of parameters to associate with the Data Factory Dataset.

Exactly one of the following location blocks must be set for a Delimited Text Dataset:

- AzureBlobFsLocation DatasetDelimitedTextAzureBlobFsLocation: An azure_blob_fs_location block as defined below.
- AzureBlobStorageLocation DatasetDelimitedTextAzureBlobStorageLocation: An azure_blob_storage_location block as defined below.
- HttpServerLocation DatasetDelimitedTextHttpServerLocation: A http_server_location block as defined below.

The following arguments are specific to a Delimited Text Dataset:

- ColumnDelimiter string: The column delimiter. Defaults to ",".
- CompressionCodec string: The compression codec used to read/write text files. Valid values are None, bzip2, gzip, deflate, ZipDeflate, TarGzip, Tar, snappy and lz4. Please note these values are case sensitive.
- CompressionLevel string: The compression ratio for the Data Factory Dataset. Valid values are Fastest or Optimal. Please note these values are case sensitive.
- Encoding string: The encoding format for the file.
- EscapeCharacter string: The escape character. Defaults to \.
- FirstRowAsHeader bool: When used as input, treat the first row of data as headers. When used as output, write the headers into the output as the first row of data. Defaults to false.
- NullValue string: The null value string. Defaults to an empty string ("").
- QuoteCharacter string: The quote character. Defaults to ".
- RowDelimiter string: The row delimiter. On read, defaults to any of \r\n, \r and \n; on write, defaults to \n for the mapping data flow and \r\n for the Copy activity.
- SchemaColumns List<DatasetDelimitedTextSchemaColumn>: One or more schema_column blocks as defined below.
Supporting Types
DatasetDelimitedTextAzureBlobFsLocation, DatasetDelimitedTextAzureBlobFsLocationArgs
- FileSystem string: The storage Data Lake Gen2 file system on the Azure Blob Storage Account hosting the file.
- Filename string: The filename of the file.
- Path string: The folder path to the file.
DatasetDelimitedTextAzureBlobStorageLocation, DatasetDelimitedTextAzureBlobStorageLocationArgs
- Container string: The container on the Azure Blob Storage Account hosting the file.
- DynamicContainerEnabled bool: Is the container using dynamic expression, function or system variables? Defaults to false.
- DynamicFilenameEnabled bool: Is the filename using dynamic expression, function or system variables? Defaults to false.
- DynamicPathEnabled bool: Is the path using dynamic expression, function or system variables? Defaults to false.
- Filename string: The filename of the file.
- Path string: The folder path to the file. This can be an empty string.
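For reference, a minimal TypeScript sketch of a dataset backed by an azure_blob_storage_location block. The container name, file path and the "storageConnectionString" config key are assumptions, and the sketch pairs the location with a LinkedServiceAzureBlobStorage linked service; exampleFactory is the Factory from the example program above:

import * as pulumi from "@pulumi/pulumi";
import * as azure from "@pulumi/azure";

// Assumption: the storage connection string is supplied as a secret stack config value.
const config = new pulumi.Config();
const blobLinkedService = new azure.datafactory.LinkedServiceAzureBlobStorage("exampleBlobLinkedService", {
    dataFactoryId: exampleFactory.id,
    connectionString: config.requireSecret("storageConnectionString"),
});

const blobDataset = new azure.datafactory.DatasetDelimitedText("exampleBlobDataset", {
    dataFactoryId: exampleFactory.id,
    linkedServiceName: blobLinkedService.name,
    azureBlobStorageLocation: {
        container: "data",        // assumed container name
        path: "input/csv",        // assumed folder path
        filename: "records.csv",  // assumed filename
    },
    columnDelimiter: ",",
    firstRowAsHeader: true,
});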
DatasetDelimitedTextHttpServerLocation, DatasetDelimitedTextHttpServerLocationArgs
- Filename string: The filename of the file on the web server.
- Path string: The folder path to the file on the web server.
- RelativeUrl string: The base URL to the web server hosting the file.
- DynamicFilenameEnabled bool: Is the filename using dynamic expression, function or system variables? Defaults to false.
- DynamicPathEnabled bool: Is the path using dynamic expression, function or system variables? Defaults to false.
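As an illustration of the dynamic flags, a TypeScript sketch that parameterizes the filename; the parameter name fileName and the @dataset() expression are assumptions based on Data Factory expression syntax, and exampleFactory / exampleLinkedServiceWeb come from the example program above:

// Assumption: the dataset exposes a "fileName" parameter that is resolved at runtime.
const parameterizedDataset = new azure.datafactory.DatasetDelimitedText("exampleParameterizedDataset", {
    dataFactoryId: exampleFactory.id,
    linkedServiceName: exampleLinkedServiceWeb.name,
    parameters: {
        fileName: "fizz.txt",
    },
    httpServerLocation: {
        relativeUrl: "http://www.bing.com",
        path: "foo/bar/",
        filename: "@dataset().fileName",   // Data Factory expression, evaluated at runtime
        dynamicFilenameEnabled: true,
    },
});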
DatasetDelimitedTextSchemaColumn, DatasetDelimitedTextSchemaColumnArgs
- Name string: The name of the column.
- Description string: The description of the column.
- Type string: Type of the column. Valid values are Byte, Byte[], Boolean, Date, DateTime, DateTimeOffset, Decimal, Double, Guid, Int16, Int32, Int64, Single, String and TimeSpan. Please note these values are case sensitive.
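A short TypeScript sketch of schema_column blocks attached to the dataset from the example program above; the column names and types are assumptions chosen from the case-sensitive values listed above:

// Assumption: the delimited file has an integer "id" column and a string "name" column.
const typedDataset = new azure.datafactory.DatasetDelimitedText("exampleTypedDataset", {
    dataFactoryId: exampleFactory.id,
    linkedServiceName: exampleLinkedServiceWeb.name,
    httpServerLocation: {
        relativeUrl: "http://www.bing.com",
        path: "foo/bar/",
        filename: "fizz.txt",
    },
    firstRowAsHeader: true,
    schemaColumns: [
        { name: "id", type: "Int32", description: "Row identifier" },
        { name: "name", type: "String", description: "Display name" },
    ],
});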
Import
Data Factory Datasets can be imported using the resource id, e.g.
$ pulumi import azure:datafactory/datasetDelimitedText:DatasetDelimitedText example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/example/providers/Microsoft.DataFactory/factories/example/datasets/example
Package Details
- Repository
- Azure Classic pulumi/pulumi-azure
- License
- Apache-2.0
- Notes
- This Pulumi package is based on the azurerm Terraform Provider.