1. Packages
  2. Azure Classic
  3. API Docs
  4. datafactory
  5. DatasetParquet

We recommend using Azure Native.

Azure Classic v5.49.0 published on Tuesday, Aug 29, 2023 by Pulumi

azure.datafactory.DatasetParquet

Explore with Pulumi AI

azure logo

We recommend using Azure Native.

Azure Classic v5.49.0 published on Tuesday, Aug 29, 2023 by Pulumi

    Manages an Azure Parquet Dataset inside an Azure Data Factory.

    Example Usage

    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Azure = Pulumi.Azure;
    
    return await Deployment.RunAsync(() => 
    {
        // Resource group that hosts the Data Factory and its dataset.
        var exampleResourceGroup = new Azure.Core.ResourceGroup("exampleResourceGroup", new()
        {
            Location = "West Europe",
        });
    
        // Data Factory placed in the resource group's location.
        var exampleFactory = new Azure.DataFactory.Factory("exampleFactory", new()
        {
            Location = exampleResourceGroup.Location,
            ResourceGroupName = exampleResourceGroup.Name,
        });
    
        // Anonymous web linked service that the dataset references by name.
        var exampleLinkedServiceWeb = new Azure.DataFactory.LinkedServiceWeb("exampleLinkedServiceWeb", new()
        {
            DataFactoryId = exampleFactory.Id,
            AuthenticationType = "Anonymous",
            Url = "https://www.bing.com",
        });
    
        // Parquet dataset located through an HttpServerLocation block.
        var exampleDatasetParquet = new Azure.DataFactory.DatasetParquet("exampleDatasetParquet", new()
        {
            DataFactoryId = exampleFactory.Id,
            LinkedServiceName = exampleLinkedServiceWeb.Name,
            HttpServerLocation = new Azure.DataFactory.Inputs.DatasetParquetHttpServerLocationArgs
            {
                RelativeUrl = "http://www.bing.com",
                Path = "foo/bar/",
                Filename = "fizz.txt",
            },
        });
    
    });
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-azure/sdk/v5/go/azure/core"
    	"github.com/pulumi/pulumi-azure/sdk/v5/go/azure/datafactory"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		exampleResourceGroup, err := core.NewResourceGroup(ctx, "exampleResourceGroup", &core.ResourceGroupArgs{
    			Location: pulumi.String("West Europe"),
    		})
    		if err != nil {
    			return err
    		}
    		exampleFactory, err := datafactory.NewFactory(ctx, "exampleFactory", &datafactory.FactoryArgs{
    			Location:          exampleResourceGroup.Location,
    			ResourceGroupName: exampleResourceGroup.Name,
    		})
    		if err != nil {
    			return err
    		}
    		exampleLinkedServiceWeb, err := datafactory.NewLinkedServiceWeb(ctx, "exampleLinkedServiceWeb", &datafactory.LinkedServiceWebArgs{
    			DataFactoryId:      exampleFactory.ID(),
    			AuthenticationType: pulumi.String("Anonymous"),
    			Url:                pulumi.String("https://www.bing.com"),
    		})
    		if err != nil {
    			return err
    		}
    		_, err = datafactory.NewDatasetParquet(ctx, "exampleDatasetParquet", &datafactory.DatasetParquetArgs{
    			DataFactoryId:     exampleFactory.ID(),
    			LinkedServiceName: exampleLinkedServiceWeb.Name,
    			HttpServerLocation: &datafactory.DatasetParquetHttpServerLocationArgs{
    				RelativeUrl: pulumi.String("http://www.bing.com"),
    				Path:        pulumi.String("foo/bar/"),
    				Filename:    pulumi.String("fizz.txt"),
    			},
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.azure.core.ResourceGroup;
    import com.pulumi.azure.core.ResourceGroupArgs;
    import com.pulumi.azure.datafactory.Factory;
    import com.pulumi.azure.datafactory.FactoryArgs;
    import com.pulumi.azure.datafactory.LinkedServiceWeb;
    import com.pulumi.azure.datafactory.LinkedServiceWebArgs;
    import com.pulumi.azure.datafactory.DatasetParquet;
    import com.pulumi.azure.datafactory.DatasetParquetArgs;
    import com.pulumi.azure.datafactory.inputs.DatasetParquetHttpServerLocationArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        // Declares a resource group, a Data Factory, an anonymous web linked
        // service, and a Parquet dataset read from an HTTP server location.
        public static void stack(Context ctx) {
            // Resource group that hosts the Data Factory and its dataset.
            var exampleResourceGroup = new ResourceGroup("exampleResourceGroup", ResourceGroupArgs.builder()        
                .location("West Europe")
                .build());
    
            // Data Factory placed in the resource group's location.
            var exampleFactory = new Factory("exampleFactory", FactoryArgs.builder()        
                .location(exampleResourceGroup.location())
                .resourceGroupName(exampleResourceGroup.name())
                .build());
    
            // Anonymous web linked service that the dataset references by name.
            var exampleLinkedServiceWeb = new LinkedServiceWeb("exampleLinkedServiceWeb", LinkedServiceWebArgs.builder()        
                .dataFactoryId(exampleFactory.id())
                .authenticationType("Anonymous")
                .url("https://www.bing.com")
                .build());
    
            // Parquet dataset located through an httpServerLocation block.
            var exampleDatasetParquet = new DatasetParquet("exampleDatasetParquet", DatasetParquetArgs.builder()        
                .dataFactoryId(exampleFactory.id())
                .linkedServiceName(exampleLinkedServiceWeb.name())
                .httpServerLocation(DatasetParquetHttpServerLocationArgs.builder()
                    .relativeUrl("http://www.bing.com")
                    .path("foo/bar/")
                    .filename("fizz.txt")
                    .build())
                .build());
    
        }
    }
    
    import pulumi
    import pulumi_azure as azure
    
    # Resource group that hosts the Data Factory and its dataset.
    rg = azure.core.ResourceGroup(
        "exampleResourceGroup",
        location="West Europe",
    )
    
    # Data Factory placed in the resource group's location.
    factory = azure.datafactory.Factory(
        "exampleFactory",
        location=rg.location,
        resource_group_name=rg.name,
    )
    
    # Anonymous web linked service that the dataset references by name.
    linked_web = azure.datafactory.LinkedServiceWeb(
        "exampleLinkedServiceWeb",
        data_factory_id=factory.id,
        authentication_type="Anonymous",
        url="https://www.bing.com",
    )
    
    # Parquet dataset located through an http_server_location block.
    dataset = azure.datafactory.DatasetParquet(
        "exampleDatasetParquet",
        data_factory_id=factory.id,
        linked_service_name=linked_web.name,
        http_server_location=azure.datafactory.DatasetParquetHttpServerLocationArgs(
            relative_url="http://www.bing.com",
            path="foo/bar/",
            filename="fizz.txt",
        ),
    )
    
    import * as pulumi from "@pulumi/pulumi";
    import * as azure from "@pulumi/azure";
    
    // Resource group that hosts the Data Factory and its dataset.
    const rg = new azure.core.ResourceGroup("exampleResourceGroup", {
        location: "West Europe",
    });
    
    // Data Factory placed in the resource group's location.
    const factory = new azure.datafactory.Factory("exampleFactory", {
        location: rg.location,
        resourceGroupName: rg.name,
    });
    
    // Anonymous web linked service that the dataset references by name.
    const linkedWeb = new azure.datafactory.LinkedServiceWeb("exampleLinkedServiceWeb", {
        dataFactoryId: factory.id,
        authenticationType: "Anonymous",
        url: "https://www.bing.com",
    });
    
    // Parquet dataset located through an httpServerLocation block.
    const dataset = new azure.datafactory.DatasetParquet("exampleDatasetParquet", {
        dataFactoryId: factory.id,
        linkedServiceName: linkedWeb.name,
        httpServerLocation: {
            relativeUrl: "http://www.bing.com",
            path: "foo/bar/",
            filename: "fizz.txt",
        },
    });
    
    resources:
      # Resource group that hosts the Data Factory and its dataset.
      exampleResourceGroup:
        type: azure:core:ResourceGroup
        properties:
          location: West Europe
      # Data Factory placed in the resource group's location.
      exampleFactory:
        type: azure:datafactory:Factory
        properties:
          location: ${exampleResourceGroup.location}
          resourceGroupName: ${exampleResourceGroup.name}
      # Anonymous web linked service that the dataset references by name.
      exampleLinkedServiceWeb:
        type: azure:datafactory:LinkedServiceWeb
        properties:
          dataFactoryId: ${exampleFactory.id}
          authenticationType: Anonymous
          url: https://www.bing.com
      # Parquet dataset located through an httpServerLocation block.
      exampleDatasetParquet:
        type: azure:datafactory:DatasetParquet
        properties:
          dataFactoryId: ${exampleFactory.id}
          linkedServiceName: ${exampleLinkedServiceWeb.name}
          httpServerLocation:
            relativeUrl: http://www.bing.com
            path: foo/bar/
            filename: fizz.txt
    

    Create DatasetParquet Resource

    new DatasetParquet(name: string, args: DatasetParquetArgs, opts?: CustomResourceOptions);
    @overload
    def DatasetParquet(resource_name: str,
                       opts: Optional[ResourceOptions] = None,
                       additional_properties: Optional[Mapping[str, str]] = None,
                       annotations: Optional[Sequence[str]] = None,
                       azure_blob_storage_location: Optional[DatasetParquetAzureBlobStorageLocationArgs] = None,
                       compression_codec: Optional[str] = None,
                       compression_level: Optional[str] = None,
                       data_factory_id: Optional[str] = None,
                       description: Optional[str] = None,
                       folder: Optional[str] = None,
                       http_server_location: Optional[DatasetParquetHttpServerLocationArgs] = None,
                       linked_service_name: Optional[str] = None,
                       name: Optional[str] = None,
                       parameters: Optional[Mapping[str, str]] = None,
                       schema_columns: Optional[Sequence[DatasetParquetSchemaColumnArgs]] = None)
    @overload
    def DatasetParquet(resource_name: str,
                       args: DatasetParquetArgs,
                       opts: Optional[ResourceOptions] = None)
    func NewDatasetParquet(ctx *Context, name string, args DatasetParquetArgs, opts ...ResourceOption) (*DatasetParquet, error)
    public DatasetParquet(string name, DatasetParquetArgs args, CustomResourceOptions? opts = null)
    public DatasetParquet(String name, DatasetParquetArgs args)
    public DatasetParquet(String name, DatasetParquetArgs args, CustomResourceOptions options)
    
    type: azure:datafactory:DatasetParquet
    properties: # The arguments to resource properties.
    options: # Bag of options to control resource's behavior.
    
    
    name string
    The unique name of the resource.
    args DatasetParquetArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    resource_name str
    The unique name of the resource.
    args DatasetParquetArgs
    The arguments to resource properties.
    opts ResourceOptions
    Bag of options to control resource's behavior.
    ctx Context
    Context object for the current deployment.
    name string
    The unique name of the resource.
    args DatasetParquetArgs
    The arguments to resource properties.
    opts ResourceOption
    Bag of options to control resource's behavior.
    name string
    The unique name of the resource.
    args DatasetParquetArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    name String
    The unique name of the resource.
    args DatasetParquetArgs
    The arguments to resource properties.
    options CustomResourceOptions
    Bag of options to control resource's behavior.

    DatasetParquet Resource Properties

    To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

    Inputs

    The DatasetParquet resource accepts the following input properties:

    DataFactoryId string

    The ID of the Data Factory with which to associate the Dataset. Changing this forces a new resource.

    LinkedServiceName string

    The name of the Data Factory Linked Service with which to associate the Dataset.

    AdditionalProperties Dictionary<string, string>

    A map of additional properties to associate with the Data Factory Dataset.

    The following locations are supported for a Parquet Dataset:

    Annotations List<string>

    List of tags that can be used for describing the Data Factory Dataset.

    AzureBlobStorageLocation DatasetParquetAzureBlobStorageLocation

    An azure_blob_storage_location block as defined below.

    The following supported arguments are specific to Parquet Dataset:

    CompressionCodec string

    The compression codec used to read/write text files. Valid values are bzip2, gzip, deflate, ZipDeflate, TarGzip, Tar, snappy, or lz4. Please note these values are case-sensitive.

    CompressionLevel string

    Specifies the compression level. Possible values are Optimal and Fastest.

    Description string

    The description for the Data Factory Dataset.

    Folder string

    The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.

    HttpServerLocation DatasetParquetHttpServerLocation

    A http_server_location block as defined below.

    Name string

    Specifies the name of the Data Factory Dataset. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.

    Parameters Dictionary<string, string>

    A map of parameters to associate with the Data Factory Dataset.

    SchemaColumns List<DatasetParquetSchemaColumn>

    A schema_column block as defined below.

    DataFactoryId string

    The Data Factory ID in which to associate the Dataset with. Changing this forces a new resource.

    LinkedServiceName string

    The Data Factory Linked Service name in which to associate the Dataset with.

    AdditionalProperties map[string]string

    A map of additional properties to associate with the Data Factory Dataset.

    The following supported locations for a Parquet Dataset:

    Annotations []string

    List of tags that can be used for describing the Data Factory Dataset.

    AzureBlobStorageLocation DatasetParquetAzureBlobStorageLocationArgs

    A azure_blob_storage_location block as defined below.

    The following supported arguments are specific to Parquet Dataset:

    CompressionCodec string

    The compression codec used to read/write text files. Valid values are bzip2, gzip, deflate, ZipDeflate, TarGzip, Tar, snappy, or lz4. Please note these values are case-sensitive.

    CompressionLevel string

    Specifies the compression level. Possible values are Optimal and Fastest,

    Description string

    The description for the Data Factory Dataset.

    Folder string

    The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.

    HttpServerLocation DatasetParquetHttpServerLocationArgs

    A http_server_location block as defined below.

    Name string

    Specifies the name of the Data Factory Dataset. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.

    Parameters map[string]string

    A map of parameters to associate with the Data Factory Dataset.

    SchemaColumns []DatasetParquetSchemaColumnArgs

    A schema_column block as defined below.

    dataFactoryId String

    The Data Factory ID in which to associate the Dataset with. Changing this forces a new resource.

    linkedServiceName String

    The Data Factory Linked Service name in which to associate the Dataset with.

    additionalProperties Map<String,String>

    A map of additional properties to associate with the Data Factory Dataset.

    The following supported locations for a Parquet Dataset:

    annotations List<String>

    List of tags that can be used for describing the Data Factory Dataset.

    azureBlobStorageLocation DatasetParquetAzureBlobStorageLocation

    A azure_blob_storage_location block as defined below.

    The following supported arguments are specific to Parquet Dataset:

    compressionCodec String

    The compression codec used to read/write text files. Valid values are bzip2, gzip, deflate, ZipDeflate, TarGzip, Tar, snappy, or lz4. Please note these values are case-sensitive.

    compressionLevel String

    Specifies the compression level. Possible values are Optimal and Fastest,

    description String

    The description for the Data Factory Dataset.

    folder String

    The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.

    httpServerLocation DatasetParquetHttpServerLocation

    A http_server_location block as defined below.

    name String

    Specifies the name of the Data Factory Dataset. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.

    parameters Map<String,String>

    A map of parameters to associate with the Data Factory Dataset.

    schemaColumns List<DatasetParquetSchemaColumn>

    A schema_column block as defined below.

    dataFactoryId string

    The Data Factory ID in which to associate the Dataset with. Changing this forces a new resource.

    linkedServiceName string

    The Data Factory Linked Service name in which to associate the Dataset with.

    additionalProperties {[key: string]: string}

    A map of additional properties to associate with the Data Factory Dataset.

    The following supported locations for a Parquet Dataset:

    annotations string[]

    List of tags that can be used for describing the Data Factory Dataset.

    azureBlobStorageLocation DatasetParquetAzureBlobStorageLocation

    A azure_blob_storage_location block as defined below.

    The following supported arguments are specific to Parquet Dataset:

    compressionCodec string

    The compression codec used to read/write text files. Valid values are bzip2, gzip, deflate, ZipDeflate, TarGzip, Tar, snappy, or lz4. Please note these values are case-sensitive.

    compressionLevel string

    Specifies the compression level. Possible values are Optimal and Fastest,

    description string

    The description for the Data Factory Dataset.

    folder string

    The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.

    httpServerLocation DatasetParquetHttpServerLocation

    A http_server_location block as defined below.

    name string

    Specifies the name of the Data Factory Dataset. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.

    parameters {[key: string]: string}

    A map of parameters to associate with the Data Factory Dataset.

    schemaColumns DatasetParquetSchemaColumn[]

    A schema_column block as defined below.

    data_factory_id str

    The Data Factory ID in which to associate the Dataset with. Changing this forces a new resource.

    linked_service_name str

    The Data Factory Linked Service name in which to associate the Dataset with.

    additional_properties Mapping[str, str]

    A map of additional properties to associate with the Data Factory Dataset.

    The following supported locations for a Parquet Dataset:

    annotations Sequence[str]

    List of tags that can be used for describing the Data Factory Dataset.

    azure_blob_storage_location DatasetParquetAzureBlobStorageLocationArgs

    A azure_blob_storage_location block as defined below.

    The following supported arguments are specific to Parquet Dataset:

    compression_codec str

    The compression codec used to read/write text files. Valid values are bzip2, gzip, deflate, ZipDeflate, TarGzip, Tar, snappy, or lz4. Please note these values are case-sensitive.

    compression_level str

    Specifies the compression level. Possible values are Optimal and Fastest,

    description str

    The description for the Data Factory Dataset.

    folder str

    The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.

    http_server_location DatasetParquetHttpServerLocationArgs

    A http_server_location block as defined below.

    name str

    Specifies the name of the Data Factory Dataset. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.

    parameters Mapping[str, str]

    A map of parameters to associate with the Data Factory Dataset.

    schema_columns Sequence[DatasetParquetSchemaColumnArgs]

    A schema_column block as defined below.

    dataFactoryId String

    The Data Factory ID in which to associate the Dataset with. Changing this forces a new resource.

    linkedServiceName String

    The Data Factory Linked Service name in which to associate the Dataset with.

    additionalProperties Map<String>

    A map of additional properties to associate with the Data Factory Dataset.

    The following supported locations for a Parquet Dataset:

    annotations List<String>

    List of tags that can be used for describing the Data Factory Dataset.

    azureBlobStorageLocation Property Map

    A azure_blob_storage_location block as defined below.

    The following supported arguments are specific to Parquet Dataset:

    compressionCodec String

    The compression codec used to read/write text files. Valid values are bzip2, gzip, deflate, ZipDeflate, TarGzip, Tar, snappy, or lz4. Please note these values are case-sensitive.

    compressionLevel String

    Specifies the compression level. Possible values are Optimal and Fastest,

    description String

    The description for the Data Factory Dataset.

    folder String

    The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.

    httpServerLocation Property Map

    A http_server_location block as defined below.

    name String

    Specifies the name of the Data Factory Dataset. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.

    parameters Map<String>

    A map of parameters to associate with the Data Factory Dataset.

    schemaColumns List<Property Map>

    A schema_column block as defined below.

    Outputs

    All input properties are implicitly available as output properties. Additionally, the DatasetParquet resource produces the following output properties:

    Id string

    The provider-assigned unique ID for this managed resource.

    Id string

    The provider-assigned unique ID for this managed resource.

    id String

    The provider-assigned unique ID for this managed resource.

    id string

    The provider-assigned unique ID for this managed resource.

    id str

    The provider-assigned unique ID for this managed resource.

    id String

    The provider-assigned unique ID for this managed resource.

    Look up Existing DatasetParquet Resource

    Get an existing DatasetParquet resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

    public static get(name: string, id: Input<ID>, state?: DatasetParquetState, opts?: CustomResourceOptions): DatasetParquet
    @staticmethod
    def get(resource_name: str,
            id: str,
            opts: Optional[ResourceOptions] = None,
            additional_properties: Optional[Mapping[str, str]] = None,
            annotations: Optional[Sequence[str]] = None,
            azure_blob_storage_location: Optional[DatasetParquetAzureBlobStorageLocationArgs] = None,
            compression_codec: Optional[str] = None,
            compression_level: Optional[str] = None,
            data_factory_id: Optional[str] = None,
            description: Optional[str] = None,
            folder: Optional[str] = None,
            http_server_location: Optional[DatasetParquetHttpServerLocationArgs] = None,
            linked_service_name: Optional[str] = None,
            name: Optional[str] = None,
            parameters: Optional[Mapping[str, str]] = None,
            schema_columns: Optional[Sequence[DatasetParquetSchemaColumnArgs]] = None) -> DatasetParquet
    func GetDatasetParquet(ctx *Context, name string, id IDInput, state *DatasetParquetState, opts ...ResourceOption) (*DatasetParquet, error)
    public static DatasetParquet Get(string name, Input<string> id, DatasetParquetState? state, CustomResourceOptions? opts = null)
    public static DatasetParquet get(String name, Output<String> id, DatasetParquetState state, CustomResourceOptions options)
    Resource lookup is not supported in YAML
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    resource_name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    The following state arguments are supported:
    AdditionalProperties Dictionary<string, string>

    A map of additional properties to associate with the Data Factory Dataset.

    The following supported locations for a Parquet Dataset:

    Annotations List<string>

    List of tags that can be used for describing the Data Factory Dataset.

    AzureBlobStorageLocation DatasetParquetAzureBlobStorageLocation

    A azure_blob_storage_location block as defined below.

    The following supported arguments are specific to Parquet Dataset:

    CompressionCodec string

    The compression codec used to read/write text files. Valid values are bzip2, gzip, deflate, ZipDeflate, TarGzip, Tar, snappy, or lz4. Please note these values are case-sensitive.

    CompressionLevel string

    Specifies the compression level. Possible values are Optimal and Fastest,

    DataFactoryId string

    The Data Factory ID in which to associate the Dataset with. Changing this forces a new resource.

    Description string

    The description for the Data Factory Dataset.

    Folder string

    The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.

    HttpServerLocation DatasetParquetHttpServerLocation

    A http_server_location block as defined below.

    LinkedServiceName string

    The Data Factory Linked Service name in which to associate the Dataset with.

    Name string

    Specifies the name of the Data Factory Dataset. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.

    Parameters Dictionary<string, string>

    A map of parameters to associate with the Data Factory Dataset.

    SchemaColumns List<DatasetParquetSchemaColumn>

    A schema_column block as defined below.

    AdditionalProperties map[string]string

    A map of additional properties to associate with the Data Factory Dataset.

    The following supported locations for a Parquet Dataset:

    Annotations []string

    List of tags that can be used for describing the Data Factory Dataset.

    AzureBlobStorageLocation DatasetParquetAzureBlobStorageLocationArgs

    A azure_blob_storage_location block as defined below.

    The following supported arguments are specific to Parquet Dataset:

    CompressionCodec string

    The compression codec used to read/write text files. Valid values are bzip2, gzip, deflate, ZipDeflate, TarGzip, Tar, snappy, or lz4. Please note these values are case-sensitive.

    CompressionLevel string

    Specifies the compression level. Possible values are Optimal and Fastest,

    DataFactoryId string

    The Data Factory ID in which to associate the Dataset with. Changing this forces a new resource.

    Description string

    The description for the Data Factory Dataset.

    Folder string

    The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.

    HttpServerLocation DatasetParquetHttpServerLocationArgs

    A http_server_location block as defined below.

    LinkedServiceName string

    The Data Factory Linked Service name in which to associate the Dataset with.

    Name string

    Specifies the name of the Data Factory Dataset. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.

    Parameters map[string]string

    A map of parameters to associate with the Data Factory Dataset.

    SchemaColumns []DatasetParquetSchemaColumnArgs

    A schema_column block as defined below.

    additionalProperties Map<String,String>

    A map of additional properties to associate with the Data Factory Dataset.

    The following supported locations for a Parquet Dataset:

    annotations List<String>

    List of tags that can be used for describing the Data Factory Dataset.

    azureBlobStorageLocation DatasetParquetAzureBlobStorageLocation

    A azure_blob_storage_location block as defined below.

    The following supported arguments are specific to Parquet Dataset:

    compressionCodec String

    The compression codec used to read/write text files. Valid values are bzip2, gzip, deflate, ZipDeflate, TarGzip, Tar, snappy, or lz4. Please note these values are case-sensitive.

    compressionLevel String

    Specifies the compression level. Possible values are Optimal and Fastest,

    dataFactoryId String

    The Data Factory ID in which to associate the Dataset with. Changing this forces a new resource.

    description String

    The description for the Data Factory Dataset.

    folder String

    The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.

    httpServerLocation DatasetParquetHttpServerLocation

    A http_server_location block as defined below.

    linkedServiceName String

    The Data Factory Linked Service name in which to associate the Dataset with.

    name String

    Specifies the name of the Data Factory Dataset. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.

    parameters Map<String,String>

    A map of parameters to associate with the Data Factory Dataset.

    schemaColumns List<DatasetParquetSchemaColumn>

    A schema_column block as defined below.

    additionalProperties {[key: string]: string}

    A map of additional properties to associate with the Data Factory Dataset.

    The following supported locations for a Parquet Dataset:

    annotations string[]

    List of tags that can be used for describing the Data Factory Dataset.

    azureBlobStorageLocation DatasetParquetAzureBlobStorageLocation

    A azure_blob_storage_location block as defined below.

    The following supported arguments are specific to Parquet Dataset:

    compressionCodec string

    The compression codec used to read/write text files. Valid values are bzip2, gzip, deflate, ZipDeflate, TarGzip, Tar, snappy, or lz4. Please note these values are case-sensitive.

    compressionLevel string

    Specifies the compression level. Possible values are Optimal and Fastest,

    dataFactoryId string

    The Data Factory ID in which to associate the Dataset with. Changing this forces a new resource.

    description string

    The description for the Data Factory Dataset.

    folder string

    The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.

    httpServerLocation DatasetParquetHttpServerLocation

    An http_server_location block as defined below.

    linkedServiceName string

    The name of the Data Factory Linked Service with which to associate the Dataset.

    name string

    Specifies the name of the Data Factory Dataset. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.

    parameters {[key: string]: string}

    A map of parameters to associate with the Data Factory Dataset.

    schemaColumns DatasetParquetSchemaColumn[]

    A schema_column block as defined below.

    additional_properties Mapping[str, str]

    A map of additional properties to associate with the Data Factory Dataset.

    The following are the supported locations for a Parquet Dataset:

    annotations Sequence[str]

    List of tags that can be used for describing the Data Factory Dataset.

    azure_blob_storage_location DatasetParquetAzureBlobStorageLocationArgs

    An azure_blob_storage_location block as defined below.

    The following supported arguments are specific to Parquet Dataset:

    compression_codec str

    The compression codec used to read/write text files. Valid values are bzip2, gzip, deflate, ZipDeflate, TarGzip, Tar, snappy, or lz4. Please note these values are case-sensitive.

    compression_level str

    Specifies the compression level. Possible values are Optimal and Fastest.

    data_factory_id str

    The ID of the Data Factory in which to associate the Dataset. Changing this forces a new resource.

    description str

    The description for the Data Factory Dataset.

    folder str

    The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.

    http_server_location DatasetParquetHttpServerLocationArgs

    An http_server_location block as defined below.

    linked_service_name str

    The name of the Data Factory Linked Service with which to associate the Dataset.

    name str

    Specifies the name of the Data Factory Dataset. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.

    parameters Mapping[str, str]

    A map of parameters to associate with the Data Factory Dataset.

    schema_columns Sequence[DatasetParquetSchemaColumnArgs]

    A schema_column block as defined below.

    additionalProperties Map<String>

    A map of additional properties to associate with the Data Factory Dataset.

    The following are the supported locations for a Parquet Dataset:

    annotations List<String>

    List of tags that can be used for describing the Data Factory Dataset.

    azureBlobStorageLocation Property Map

    An azure_blob_storage_location block as defined below.

    The following supported arguments are specific to Parquet Dataset:

    compressionCodec String

    The compression codec used to read/write text files. Valid values are bzip2, gzip, deflate, ZipDeflate, TarGzip, Tar, snappy, or lz4. Please note these values are case-sensitive.

    compressionLevel String

    Specifies the compression level. Possible values are Optimal and Fastest.

    dataFactoryId String

    The ID of the Data Factory in which to associate the Dataset. Changing this forces a new resource.

    description String

    The description for the Data Factory Dataset.

    folder String

    The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.

    httpServerLocation Property Map

    An http_server_location block as defined below.

    linkedServiceName String

    The name of the Data Factory Linked Service with which to associate the Dataset.

    name String

    Specifies the name of the Data Factory Dataset. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.

    parameters Map<String>

    A map of parameters to associate with the Data Factory Dataset.

    schemaColumns List<Property Map>

    A schema_column block as defined below.

    Supporting Types

    DatasetParquetAzureBlobStorageLocation, DatasetParquetAzureBlobStorageLocationArgs

    Container string

    The container on the Azure Blob Storage Account hosting the file.

    DynamicContainerEnabled bool

    Is the container using dynamic expression, function or system variables? Defaults to false.

    DynamicFilenameEnabled bool

    Is the filename using dynamic expression, function or system variables? Defaults to false.

    DynamicPathEnabled bool

    Is the path using dynamic expression, function or system variables? Defaults to false.

    Filename string

    The filename of the file on the web server.

    Path string

    The folder path to the file on the web server.

    Container string

    The container on the Azure Blob Storage Account hosting the file.

    DynamicContainerEnabled bool

    Is the container using dynamic expression, function or system variables? Defaults to false.

    DynamicFilenameEnabled bool

    Is the filename using dynamic expression, function or system variables? Defaults to false.

    DynamicPathEnabled bool

    Is the path using dynamic expression, function or system variables? Defaults to false.

    Filename string

    The filename of the file on the web server.

    Path string

    The folder path to the file on the web server.

    container String

    The container on the Azure Blob Storage Account hosting the file.

    dynamicContainerEnabled Boolean

    Is the container using dynamic expression, function or system variables? Defaults to false.

    dynamicFilenameEnabled Boolean

    Is the filename using dynamic expression, function or system variables? Defaults to false.

    dynamicPathEnabled Boolean

    Is the path using dynamic expression, function or system variables? Defaults to false.

    filename String

    The filename of the file on the web server.

    path String

    The folder path to the file on the web server.

    container string

    The container on the Azure Blob Storage Account hosting the file.

    dynamicContainerEnabled boolean

    Is the container using dynamic expression, function or system variables? Defaults to false.

    dynamicFilenameEnabled boolean

    Is the filename using dynamic expression, function or system variables? Defaults to false.

    dynamicPathEnabled boolean

    Is the path using dynamic expression, function or system variables? Defaults to false.

    filename string

    The filename of the file on the web server.

    path string

    The folder path to the file on the web server.

    container str

    The container on the Azure Blob Storage Account hosting the file.

    dynamic_container_enabled bool

    Is the container using dynamic expression, function or system variables? Defaults to false.

    dynamic_filename_enabled bool

    Is the filename using dynamic expression, function or system variables? Defaults to false.

    dynamic_path_enabled bool

    Is the path using dynamic expression, function or system variables? Defaults to false.

    filename str

    The filename of the file on the web server.

    path str

    The folder path to the file on the web server.

    container String

    The container on the Azure Blob Storage Account hosting the file.

    dynamicContainerEnabled Boolean

    Is the container using dynamic expression, function or system variables? Defaults to false.

    dynamicFilenameEnabled Boolean

    Is the filename using dynamic expression, function or system variables? Defaults to false.

    dynamicPathEnabled Boolean

    Is the path using dynamic expression, function or system variables? Defaults to false.

    filename String

    The filename of the file on the web server.

    path String

    The folder path to the file on the web server.

    DatasetParquetHttpServerLocation, DatasetParquetHttpServerLocationArgs

    Filename string

    The filename of the file on the web server.

    RelativeUrl string

    The base URL to the web server hosting the file.

    DynamicFilenameEnabled bool

    Is the filename using dynamic expression, function or system variables? Defaults to false.

    DynamicPathEnabled bool

    Is the path using dynamic expression, function or system variables? Defaults to false.

    Path string

    The folder path to the file on the web server.

    Filename string

    The filename of the file on the web server.

    RelativeUrl string

    The base URL to the web server hosting the file.

    DynamicFilenameEnabled bool

    Is the filename using dynamic expression, function or system variables? Defaults to false.

    DynamicPathEnabled bool

    Is the path using dynamic expression, function or system variables? Defaults to false.

    Path string

    The folder path to the file on the web server.

    filename String

    The filename of the file on the web server.

    relativeUrl String

    The base URL to the web server hosting the file.

    dynamicFilenameEnabled Boolean

    Is the filename using dynamic expression, function or system variables? Defaults to false.

    dynamicPathEnabled Boolean

    Is the path using dynamic expression, function or system variables? Defaults to false.

    path String

    The folder path to the file on the web server.

    filename string

    The filename of the file on the web server.

    relativeUrl string

    The base URL to the web server hosting the file.

    dynamicFilenameEnabled boolean

    Is the filename using dynamic expression, function or system variables? Defaults to false.

    dynamicPathEnabled boolean

    Is the path using dynamic expression, function or system variables? Defaults to false.

    path string

    The folder path to the file on the web server.

    filename str

    The filename of the file on the web server.

    relative_url str

    The base URL to the web server hosting the file.

    dynamic_filename_enabled bool

    Is the filename using dynamic expression, function or system variables? Defaults to false.

    dynamic_path_enabled bool

    Is the path using dynamic expression, function or system variables? Defaults to false.

    path str

    The folder path to the file on the web server.

    filename String

    The filename of the file on the web server.

    relativeUrl String

    The base URL to the web server hosting the file.

    dynamicFilenameEnabled Boolean

    Is the filename using dynamic expression, function or system variables? Defaults to false.

    dynamicPathEnabled Boolean

    Is the path using dynamic expression, function or system variables? Defaults to false.

    path String

    The folder path to the file on the web server.

    DatasetParquetSchemaColumn, DatasetParquetSchemaColumnArgs

    Name string

    The name of the column.

    Description string

    The description of the column.

    Type string

    Type of the column. Valid values are Byte, Byte[], Boolean, Date, DateTime, DateTimeOffset, Decimal, Double, Guid, Int16, Int32, Int64, Single, String, TimeSpan. Please note these values are case sensitive.

    Name string

    The name of the column.

    Description string

    The description of the column.

    Type string

    Type of the column. Valid values are Byte, Byte[], Boolean, Date, DateTime, DateTimeOffset, Decimal, Double, Guid, Int16, Int32, Int64, Single, String, TimeSpan. Please note these values are case sensitive.

    name String

    The name of the column.

    description String

    The description of the column.

    type String

    Type of the column. Valid values are Byte, Byte[], Boolean, Date, DateTime, DateTimeOffset, Decimal, Double, Guid, Int16, Int32, Int64, Single, String, TimeSpan. Please note these values are case sensitive.

    name string

    The name of the column.

    description string

    The description of the column.

    type string

    Type of the column. Valid values are Byte, Byte[], Boolean, Date, DateTime, DateTimeOffset, Decimal, Double, Guid, Int16, Int32, Int64, Single, String, TimeSpan. Please note these values are case sensitive.

    name str

    The name of the column.

    description str

    The description of the column.

    type str

    Type of the column. Valid values are Byte, Byte[], Boolean, Date, DateTime, DateTimeOffset, Decimal, Double, Guid, Int16, Int32, Int64, Single, String, TimeSpan. Please note these values are case sensitive.

    name String

    The name of the column.

    description String

    The description of the column.

    type String

    Type of the column. Valid values are Byte, Byte[], Boolean, Date, DateTime, DateTimeOffset, Decimal, Double, Guid, Int16, Int32, Int64, Single, String, TimeSpan. Please note these values are case sensitive.

    Import

    Data Factory Datasets can be imported using the resource id, e.g.

     $ pulumi import azure:datafactory/datasetParquet:DatasetParquet example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/example/providers/Microsoft.DataFactory/factories/example/datasets/example
    

    Package Details

    Repository
    Azure Classic pulumi/pulumi-azure
    License
    Apache-2.0
    Notes

    This Pulumi package is based on the azurerm Terraform Provider.

    azure logo

    We recommend using Azure Native.

    Azure Classic v5.49.0 published on Tuesday, Aug 29, 2023 by Pulumi