airbyte 1.0.0-rc6 published on Monday, Feb 16, 2026 by airbytehq

    SourceAzureBlobStorage Resource

    Example Usage

    import * as pulumi from "@pulumi/pulumi";
    import * as airbyte from "@pulumi/airbyte";
    
    const mySourceAzureblobstorage = new airbyte.SourceAzureBlobStorage("my_source_azureblobstorage", {
        configuration: {
            azureBlobStorageAccountName: "airbyte5storage",
            azureBlobStorageContainerName: "airbytetescontainername",
            azureBlobStorageEndpoint: "blob.core.windows.net",
            credentials: {
                authenticateViaStorageAccountKey: {
                    azureBlobStorageAccountKey: "Z8ZkZpteggFx394vm+PJHnGTvdRncaYS+JhLKdj789YNmD+iyGTnG+PV+POiuYNhBg/ACS+LKjd%4FG3FHGN12Nd==",
                },
            },
            deliveryMethod: {
                copyRawFiles: {
                    preserveDirectoryStructure: false,
                },
            },
            startDate: "2021-01-01T00:00:00.000000Z",
            streams: [{
                daysToSyncIfHistoryIsFull: 1,
                format: {
                    excelFormat: {},
                    jsonlFormat: {},
                },
                globs: ["..."],
                inputSchema: "...my_input_schema...",
                legacyPrefix: "...my_legacy_prefix...",
                name: "...my_name...",
                primaryKey: "...my_primary_key...",
                recentNFilesToReadForSchemaDiscovery: 2,
                schemaless: true,
                useFirstFoundFileForSchemaDiscovery: false,
                validationPolicy: "Wait for Discover",
            }],
        },
        definitionId: "3385920f-d837-42e0-b72d-7927f28bf9f2",
        name: "...my_name...",
        secretId: "...my_secret_id...",
        workspaceId: "2c3aeaad-c70f-44a8-a981-aca12752c864",
    });
    
    import pulumi
    import pulumi_airbyte as airbyte
    
    my_source_azureblobstorage = airbyte.SourceAzureBlobStorage("my_source_azureblobstorage",
        configuration={
            "azure_blob_storage_account_name": "airbyte5storage",
            "azure_blob_storage_container_name": "airbytetescontainername",
            "azure_blob_storage_endpoint": "blob.core.windows.net",
            "credentials": {
                "authenticate_via_storage_account_key": {
                    "azure_blob_storage_account_key": "Z8ZkZpteggFx394vm+PJHnGTvdRncaYS+JhLKdj789YNmD+iyGTnG+PV+POiuYNhBg/ACS+LKjd%4FG3FHGN12Nd==",
                },
            },
            "delivery_method": {
                "copy_raw_files": {
                    "preserve_directory_structure": False,
                },
            },
            "start_date": "2021-01-01T00:00:00.000000Z",
            "streams": [{
                "days_to_sync_if_history_is_full": 1,
                "format": {
                    "excel_format": {},
                    "jsonl_format": {},
                },
                "globs": ["..."],
                "input_schema": "...my_input_schema...",
                "legacy_prefix": "...my_legacy_prefix...",
                "name": "...my_name...",
                "primary_key": "...my_primary_key...",
                "recent_n_files_to_read_for_schema_discovery": 2,
                "schemaless": True,
                "use_first_found_file_for_schema_discovery": False,
                "validation_policy": "Wait for Discover",
            }],
        },
        definition_id="3385920f-d837-42e0-b72d-7927f28bf9f2",
        name="...my_name...",
        secret_id="...my_secret_id...",
        workspace_id="2c3aeaad-c70f-44a8-a981-aca12752c864")
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-terraform-provider/sdks/go/airbyte/airbyte"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		_, err := airbyte.NewSourceAzureBlobStorage(ctx, "my_source_azureblobstorage", &airbyte.SourceAzureBlobStorageArgs{
    			Configuration: &airbyte.SourceAzureBlobStorageConfigurationArgs{
    				AzureBlobStorageAccountName:   pulumi.String("airbyte5storage"),
    				AzureBlobStorageContainerName: pulumi.String("airbytetescontainername"),
    				AzureBlobStorageEndpoint:      pulumi.String("blob.core.windows.net"),
    				Credentials: &airbyte.SourceAzureBlobStorageConfigurationCredentialsArgs{
    					AuthenticateViaStorageAccountKey: &airbyte.SourceAzureBlobStorageConfigurationCredentialsAuthenticateViaStorageAccountKeyArgs{
    						AzureBlobStorageAccountKey: pulumi.String("Z8ZkZpteggFx394vm+PJHnGTvdRncaYS+JhLKdj789YNmD+iyGTnG+PV+POiuYNhBg/ACS+LKjd%4FG3FHGN12Nd=="),
    					},
    				},
    				DeliveryMethod: &airbyte.SourceAzureBlobStorageConfigurationDeliveryMethodArgs{
    					CopyRawFiles: &airbyte.SourceAzureBlobStorageConfigurationDeliveryMethodCopyRawFilesArgs{
    						PreserveDirectoryStructure: pulumi.Bool(false),
    					},
    				},
    				StartDate: pulumi.String("2021-01-01T00:00:00.000000Z"),
    				Streams: airbyte.SourceAzureBlobStorageConfigurationStreamArray{
    					&airbyte.SourceAzureBlobStorageConfigurationStreamArgs{
    						DaysToSyncIfHistoryIsFull: pulumi.Float64(1),
    						Format: &airbyte.SourceAzureBlobStorageConfigurationStreamFormatArgs{
    							ExcelFormat: &airbyte.SourceAzureBlobStorageConfigurationStreamFormatExcelFormatArgs{},
    							JsonlFormat: &airbyte.SourceAzureBlobStorageConfigurationStreamFormatJsonlFormatArgs{},
    						},
    						Globs: pulumi.StringArray{
    							pulumi.String("..."),
    						},
    						InputSchema:                          pulumi.String("...my_input_schema..."),
    						LegacyPrefix:                         pulumi.String("...my_legacy_prefix..."),
    						Name:                                 pulumi.String("...my_name..."),
    						PrimaryKey:                           pulumi.String("...my_primary_key..."),
    						RecentNFilesToReadForSchemaDiscovery: pulumi.Float64(2),
    						Schemaless:                           pulumi.Bool(true),
    						UseFirstFoundFileForSchemaDiscovery:  pulumi.Bool(false),
    						ValidationPolicy:                     pulumi.String("Wait for Discover"),
    					},
    				},
    			},
    			DefinitionId: pulumi.String("3385920f-d837-42e0-b72d-7927f28bf9f2"),
    			Name:         pulumi.String("...my_name..."),
    			SecretId:     pulumi.String("...my_secret_id..."),
    			WorkspaceId:  pulumi.String("2c3aeaad-c70f-44a8-a981-aca12752c864"),
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Airbyte = Pulumi.Airbyte;
    
    return await Deployment.RunAsync(() => 
    {
        var mySourceAzureblobstorage = new Airbyte.SourceAzureBlobStorage("my_source_azureblobstorage", new()
        {
            Configuration = new Airbyte.Inputs.SourceAzureBlobStorageConfigurationArgs
            {
                AzureBlobStorageAccountName = "airbyte5storage",
                AzureBlobStorageContainerName = "airbytetescontainername",
                AzureBlobStorageEndpoint = "blob.core.windows.net",
                Credentials = new Airbyte.Inputs.SourceAzureBlobStorageConfigurationCredentialsArgs
                {
                    AuthenticateViaStorageAccountKey = new Airbyte.Inputs.SourceAzureBlobStorageConfigurationCredentialsAuthenticateViaStorageAccountKeyArgs
                    {
                        AzureBlobStorageAccountKey = "Z8ZkZpteggFx394vm+PJHnGTvdRncaYS+JhLKdj789YNmD+iyGTnG+PV+POiuYNhBg/ACS+LKjd%4FG3FHGN12Nd==",
                    },
                },
                DeliveryMethod = new Airbyte.Inputs.SourceAzureBlobStorageConfigurationDeliveryMethodArgs
                {
                    CopyRawFiles = new Airbyte.Inputs.SourceAzureBlobStorageConfigurationDeliveryMethodCopyRawFilesArgs
                    {
                        PreserveDirectoryStructure = false,
                    },
                },
                StartDate = "2021-01-01T00:00:00.000000Z",
                Streams = new[]
                {
                    new Airbyte.Inputs.SourceAzureBlobStorageConfigurationStreamArgs
                    {
                        DaysToSyncIfHistoryIsFull = 1,
                        Format = new Airbyte.Inputs.SourceAzureBlobStorageConfigurationStreamFormatArgs
                        {
                            ExcelFormat = null,
                            JsonlFormat = null,
                        },
                        Globs = new[]
                        {
                            "...",
                        },
                        InputSchema = "...my_input_schema...",
                        LegacyPrefix = "...my_legacy_prefix...",
                        Name = "...my_name...",
                        PrimaryKey = "...my_primary_key...",
                        RecentNFilesToReadForSchemaDiscovery = 2,
                        Schemaless = true,
                        UseFirstFoundFileForSchemaDiscovery = false,
                        ValidationPolicy = "Wait for Discover",
                    },
                },
            },
            DefinitionId = "3385920f-d837-42e0-b72d-7927f28bf9f2",
            Name = "...my_name...",
            SecretId = "...my_secret_id...",
            WorkspaceId = "2c3aeaad-c70f-44a8-a981-aca12752c864",
        });
    
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.airbyte.SourceAzureBlobStorage;
    import com.pulumi.airbyte.SourceAzureBlobStorageArgs;
    import com.pulumi.airbyte.inputs.SourceAzureBlobStorageConfigurationArgs;
    import com.pulumi.airbyte.inputs.SourceAzureBlobStorageConfigurationCredentialsArgs;
    import com.pulumi.airbyte.inputs.SourceAzureBlobStorageConfigurationCredentialsAuthenticateViaStorageAccountKeyArgs;
    import com.pulumi.airbyte.inputs.SourceAzureBlobStorageConfigurationDeliveryMethodArgs;
    import com.pulumi.airbyte.inputs.SourceAzureBlobStorageConfigurationDeliveryMethodCopyRawFilesArgs;
    import com.pulumi.airbyte.inputs.SourceAzureBlobStorageConfigurationStreamArgs;
    import com.pulumi.airbyte.inputs.SourceAzureBlobStorageConfigurationStreamFormatArgs;
    import com.pulumi.airbyte.inputs.SourceAzureBlobStorageConfigurationStreamFormatExcelFormatArgs;
    import com.pulumi.airbyte.inputs.SourceAzureBlobStorageConfigurationStreamFormatJsonlFormatArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var mySourceAzureblobstorage = new SourceAzureBlobStorage("mySourceAzureblobstorage", SourceAzureBlobStorageArgs.builder()
                .configuration(SourceAzureBlobStorageConfigurationArgs.builder()
                    .azureBlobStorageAccountName("airbyte5storage")
                    .azureBlobStorageContainerName("airbytetescontainername")
                    .azureBlobStorageEndpoint("blob.core.windows.net")
                    .credentials(SourceAzureBlobStorageConfigurationCredentialsArgs.builder()
                        .authenticateViaStorageAccountKey(SourceAzureBlobStorageConfigurationCredentialsAuthenticateViaStorageAccountKeyArgs.builder()
                            .azureBlobStorageAccountKey("Z8ZkZpteggFx394vm+PJHnGTvdRncaYS+JhLKdj789YNmD+iyGTnG+PV+POiuYNhBg/ACS+LKjd%4FG3FHGN12Nd==")
                            .build())
                        .build())
                    .deliveryMethod(SourceAzureBlobStorageConfigurationDeliveryMethodArgs.builder()
                        .copyRawFiles(SourceAzureBlobStorageConfigurationDeliveryMethodCopyRawFilesArgs.builder()
                            .preserveDirectoryStructure(false)
                            .build())
                        .build())
                    .startDate("2021-01-01T00:00:00.000000Z")
                    .streams(SourceAzureBlobStorageConfigurationStreamArgs.builder()
                        .daysToSyncIfHistoryIsFull(1.0)
                        .format(SourceAzureBlobStorageConfigurationStreamFormatArgs.builder()
                            .excelFormat(SourceAzureBlobStorageConfigurationStreamFormatExcelFormatArgs.builder()
                                .build())
                            .jsonlFormat(SourceAzureBlobStorageConfigurationStreamFormatJsonlFormatArgs.builder()
                                .build())
                            .build())
                        .globs("...")
                        .inputSchema("...my_input_schema...")
                        .legacyPrefix("...my_legacy_prefix...")
                        .name("...my_name...")
                        .primaryKey("...my_primary_key...")
                        .recentNFilesToReadForSchemaDiscovery(2.0)
                        .schemaless(true)
                        .useFirstFoundFileForSchemaDiscovery(false)
                        .validationPolicy("Wait for Discover")
                        .build())
                    .build())
                .definitionId("3385920f-d837-42e0-b72d-7927f28bf9f2")
                .name("...my_name...")
                .secretId("...my_secret_id...")
                .workspaceId("2c3aeaad-c70f-44a8-a981-aca12752c864")
                .build());
    
        }
    }
    
    resources:
      mySourceAzureblobstorage:
        type: airbyte:SourceAzureBlobStorage
        name: my_source_azureblobstorage
        properties:
          configuration:
            azureBlobStorageAccountName: airbyte5storage
            azureBlobStorageContainerName: airbytetescontainername
            azureBlobStorageEndpoint: blob.core.windows.net
            credentials:
              authenticateViaStorageAccountKey:
                azureBlobStorageAccountKey: Z8ZkZpteggFx394vm+PJHnGTvdRncaYS+JhLKdj789YNmD+iyGTnG+PV+POiuYNhBg/ACS+LKjd%4FG3FHGN12Nd==
            deliveryMethod:
              copyRawFiles:
                preserveDirectoryStructure: false
            startDate: '2021-01-01T00:00:00.000000Z'
            streams:
              - daysToSyncIfHistoryIsFull: 1
                format:
                  excelFormat: {}
                  jsonlFormat: {}
                globs:
                  - '...'
                inputSchema: '...my_input_schema...'
                legacyPrefix: '...my_legacy_prefix...'
                name: '...my_name...'
                primaryKey: '...my_primary_key...'
                recentNFilesToReadForSchemaDiscovery: 2
                schemaless: true
                useFirstFoundFileForSchemaDiscovery: false
                validationPolicy: Wait for Discover
          definitionId: 3385920f-d837-42e0-b72d-7927f28bf9f2
          name: '...my_name...'
          secretId: '...my_secret_id...'
          workspaceId: 2c3aeaad-c70f-44a8-a981-aca12752c864
    

    Create SourceAzureBlobStorage Resource

    Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.

    Constructor syntax

    new SourceAzureBlobStorage(name: string, args: SourceAzureBlobStorageArgs, opts?: CustomResourceOptions);
    @overload
    def SourceAzureBlobStorage(resource_name: str,
                               args: SourceAzureBlobStorageArgs,
                               opts: Optional[ResourceOptions] = None)
    
    @overload
    def SourceAzureBlobStorage(resource_name: str,
                               opts: Optional[ResourceOptions] = None,
                               configuration: Optional[SourceAzureBlobStorageConfigurationArgs] = None,
                               workspace_id: Optional[str] = None,
                               definition_id: Optional[str] = None,
                               name: Optional[str] = None,
                               secret_id: Optional[str] = None)
    func NewSourceAzureBlobStorage(ctx *Context, name string, args SourceAzureBlobStorageArgs, opts ...ResourceOption) (*SourceAzureBlobStorage, error)
    public SourceAzureBlobStorage(string name, SourceAzureBlobStorageArgs args, CustomResourceOptions? opts = null)
    public SourceAzureBlobStorage(String name, SourceAzureBlobStorageArgs args)
    public SourceAzureBlobStorage(String name, SourceAzureBlobStorageArgs args, CustomResourceOptions options)
    
    type: airbyte:SourceAzureBlobStorage
    properties: # The arguments to resource properties.
    options: # Bag of options to control resource's behavior.
    
    

    Parameters

    name string
    The unique name of the resource.
    args SourceAzureBlobStorageArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    resource_name str
    The unique name of the resource.
    args SourceAzureBlobStorageArgs
    The arguments to resource properties.
    opts ResourceOptions
    Bag of options to control resource's behavior.
    ctx Context
    Context object for the current deployment.
    name string
    The unique name of the resource.
    args SourceAzureBlobStorageArgs
    The arguments to resource properties.
    opts ResourceOption
    Bag of options to control resource's behavior.
    name string
    The unique name of the resource.
    args SourceAzureBlobStorageArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    name String
    The unique name of the resource.
    args SourceAzureBlobStorageArgs
    The arguments to resource properties.
    options CustomResourceOptions
    Bag of options to control resource's behavior.

    Constructor example

    The following reference example uses placeholder values for all input properties.

    var sourceAzureBlobStorageResource = new Airbyte.SourceAzureBlobStorage("sourceAzureBlobStorageResource", new()
    {
        Configuration = new Airbyte.Inputs.SourceAzureBlobStorageConfigurationArgs
        {
            AzureBlobStorageAccountName = "string",
            AzureBlobStorageContainerName = "string",
            Credentials = new Airbyte.Inputs.SourceAzureBlobStorageConfigurationCredentialsArgs
            {
                AuthenticateViaClientCredentials = new Airbyte.Inputs.SourceAzureBlobStorageConfigurationCredentialsAuthenticateViaClientCredentialsArgs
                {
                    AppClientId = "string",
                    AppClientSecret = "string",
                    AppTenantId = "string",
                },
                AuthenticateViaOauth2 = new Airbyte.Inputs.SourceAzureBlobStorageConfigurationCredentialsAuthenticateViaOauth2Args
                {
                    ClientId = "string",
                    ClientSecret = "string",
                    RefreshToken = "string",
                    TenantId = "string",
                },
                AuthenticateViaStorageAccountKey = new Airbyte.Inputs.SourceAzureBlobStorageConfigurationCredentialsAuthenticateViaStorageAccountKeyArgs
                {
                    AzureBlobStorageAccountKey = "string",
                },
            },
            Streams = new[]
            {
                new Airbyte.Inputs.SourceAzureBlobStorageConfigurationStreamArgs
                {
                    Format = new Airbyte.Inputs.SourceAzureBlobStorageConfigurationStreamFormatArgs
                    {
                        AvroFormat = new Airbyte.Inputs.SourceAzureBlobStorageConfigurationStreamFormatAvroFormatArgs
                        {
                            DoubleAsString = false,
                        },
                        CsvFormat = new Airbyte.Inputs.SourceAzureBlobStorageConfigurationStreamFormatCsvFormatArgs
                        {
                            Delimiter = "string",
                            DoubleQuote = false,
                            Encoding = "string",
                            EscapeChar = "string",
                            FalseValues = new[]
                            {
                                "string",
                            },
                            HeaderDefinition = new Airbyte.Inputs.SourceAzureBlobStorageConfigurationStreamFormatCsvFormatHeaderDefinitionArgs
                            {
                                Autogenerated = null,
                                FromCsv = null,
                                UserProvided = new Airbyte.Inputs.SourceAzureBlobStorageConfigurationStreamFormatCsvFormatHeaderDefinitionUserProvidedArgs
                                {
                                    ColumnNames = new[]
                                    {
                                        "string",
                                    },
                                },
                            },
                            IgnoreErrorsOnFieldsMismatch = false,
                            InferenceType = "string",
                            NullValues = new[]
                            {
                                "string",
                            },
                            QuoteChar = "string",
                            SkipRowsAfterHeader = 0,
                            SkipRowsBeforeHeader = 0,
                            StringsCanBeNull = false,
                            TrueValues = new[]
                            {
                                "string",
                            },
                        },
                        ExcelFormat = null,
                        JsonlFormat = null,
                        ParquetFormat = new Airbyte.Inputs.SourceAzureBlobStorageConfigurationStreamFormatParquetFormatArgs
                        {
                            DecimalAsFloat = false,
                        },
                        UnstructuredDocumentFormat = new Airbyte.Inputs.SourceAzureBlobStorageConfigurationStreamFormatUnstructuredDocumentFormatArgs
                        {
                            Processing = new Airbyte.Inputs.SourceAzureBlobStorageConfigurationStreamFormatUnstructuredDocumentFormatProcessingArgs
                            {
                                Local = null,
                            },
                            SkipUnprocessableFiles = false,
                            Strategy = "string",
                        },
                    },
                    Name = "string",
                    DaysToSyncIfHistoryIsFull = 0,
                    Globs = new[]
                    {
                        "string",
                    },
                    InputSchema = "string",
                    LegacyPrefix = "string",
                    PrimaryKey = "string",
                    RecentNFilesToReadForSchemaDiscovery = 0,
                    Schemaless = false,
                    UseFirstFoundFileForSchemaDiscovery = false,
                    ValidationPolicy = "string",
                },
            },
            AzureBlobStorageEndpoint = "string",
            DeliveryMethod = new Airbyte.Inputs.SourceAzureBlobStorageConfigurationDeliveryMethodArgs
            {
                CopyRawFiles = new Airbyte.Inputs.SourceAzureBlobStorageConfigurationDeliveryMethodCopyRawFilesArgs
                {
                    PreserveDirectoryStructure = false,
                },
                ReplicateRecords = null,
            },
            StartDate = "string",
        },
        WorkspaceId = "string",
        DefinitionId = "string",
        Name = "string",
        SecretId = "string",
    });
    
    example, err := airbyte.NewSourceAzureBlobStorage(ctx, "sourceAzureBlobStorageResource", &airbyte.SourceAzureBlobStorageArgs{
    	Configuration: &airbyte.SourceAzureBlobStorageConfigurationArgs{
    		AzureBlobStorageAccountName:   pulumi.String("string"),
    		AzureBlobStorageContainerName: pulumi.String("string"),
    		Credentials: &airbyte.SourceAzureBlobStorageConfigurationCredentialsArgs{
    			AuthenticateViaClientCredentials: &airbyte.SourceAzureBlobStorageConfigurationCredentialsAuthenticateViaClientCredentialsArgs{
    				AppClientId:     pulumi.String("string"),
    				AppClientSecret: pulumi.String("string"),
    				AppTenantId:     pulumi.String("string"),
    			},
    			AuthenticateViaOauth2: &airbyte.SourceAzureBlobStorageConfigurationCredentialsAuthenticateViaOauth2Args{
    				ClientId:     pulumi.String("string"),
    				ClientSecret: pulumi.String("string"),
    				RefreshToken: pulumi.String("string"),
    				TenantId:     pulumi.String("string"),
    			},
    			AuthenticateViaStorageAccountKey: &airbyte.SourceAzureBlobStorageConfigurationCredentialsAuthenticateViaStorageAccountKeyArgs{
    				AzureBlobStorageAccountKey: pulumi.String("string"),
    			},
    		},
    		Streams: airbyte.SourceAzureBlobStorageConfigurationStreamArray{
    			&airbyte.SourceAzureBlobStorageConfigurationStreamArgs{
    				Format: &airbyte.SourceAzureBlobStorageConfigurationStreamFormatArgs{
    					AvroFormat: &airbyte.SourceAzureBlobStorageConfigurationStreamFormatAvroFormatArgs{
    						DoubleAsString: pulumi.Bool(false),
    					},
    					CsvFormat: &airbyte.SourceAzureBlobStorageConfigurationStreamFormatCsvFormatArgs{
    						Delimiter:   pulumi.String("string"),
    						DoubleQuote: pulumi.Bool(false),
    						Encoding:    pulumi.String("string"),
    						EscapeChar:  pulumi.String("string"),
    						FalseValues: pulumi.StringArray{
    							pulumi.String("string"),
    						},
    						HeaderDefinition: &airbyte.SourceAzureBlobStorageConfigurationStreamFormatCsvFormatHeaderDefinitionArgs{
    							Autogenerated: &airbyte.SourceAzureBlobStorageConfigurationStreamFormatCsvFormatHeaderDefinitionAutogeneratedArgs{},
    							FromCsv:       &airbyte.SourceAzureBlobStorageConfigurationStreamFormatCsvFormatHeaderDefinitionFromCsvArgs{},
    							UserProvided: &airbyte.SourceAzureBlobStorageConfigurationStreamFormatCsvFormatHeaderDefinitionUserProvidedArgs{
    								ColumnNames: pulumi.StringArray{
    									pulumi.String("string"),
    								},
    							},
    						},
    						IgnoreErrorsOnFieldsMismatch: pulumi.Bool(false),
    						InferenceType:                pulumi.String("string"),
    						NullValues: pulumi.StringArray{
    							pulumi.String("string"),
    						},
    						QuoteChar:            pulumi.String("string"),
    						SkipRowsAfterHeader:  pulumi.Float64(0),
    						SkipRowsBeforeHeader: pulumi.Float64(0),
    						StringsCanBeNull:     pulumi.Bool(false),
    						TrueValues: pulumi.StringArray{
    							pulumi.String("string"),
    						},
    					},
    					ExcelFormat: &airbyte.SourceAzureBlobStorageConfigurationStreamFormatExcelFormatArgs{},
    					JsonlFormat: &airbyte.SourceAzureBlobStorageConfigurationStreamFormatJsonlFormatArgs{},
    					ParquetFormat: &airbyte.SourceAzureBlobStorageConfigurationStreamFormatParquetFormatArgs{
    						DecimalAsFloat: pulumi.Bool(false),
    					},
    					UnstructuredDocumentFormat: &airbyte.SourceAzureBlobStorageConfigurationStreamFormatUnstructuredDocumentFormatArgs{
    						Processing: &airbyte.SourceAzureBlobStorageConfigurationStreamFormatUnstructuredDocumentFormatProcessingArgs{
    							Local: &airbyte.SourceAzureBlobStorageConfigurationStreamFormatUnstructuredDocumentFormatProcessingLocalArgs{},
    						},
    						SkipUnprocessableFiles: pulumi.Bool(false),
    						Strategy:               pulumi.String("string"),
    					},
    				},
    				Name:                      pulumi.String("string"),
    				DaysToSyncIfHistoryIsFull: pulumi.Float64(0),
    				Globs: pulumi.StringArray{
    					pulumi.String("string"),
    				},
    				InputSchema:                          pulumi.String("string"),
    				LegacyPrefix:                         pulumi.String("string"),
    				PrimaryKey:                           pulumi.String("string"),
    				RecentNFilesToReadForSchemaDiscovery: pulumi.Float64(0),
    				Schemaless:                           pulumi.Bool(false),
    				UseFirstFoundFileForSchemaDiscovery:  pulumi.Bool(false),
    				ValidationPolicy:                     pulumi.String("string"),
    			},
    		},
    		AzureBlobStorageEndpoint: pulumi.String("string"),
    		DeliveryMethod: &airbyte.SourceAzureBlobStorageConfigurationDeliveryMethodArgs{
    			CopyRawFiles: &airbyte.SourceAzureBlobStorageConfigurationDeliveryMethodCopyRawFilesArgs{
    				PreserveDirectoryStructure: pulumi.Bool(false),
    			},
    			ReplicateRecords: &airbyte.SourceAzureBlobStorageConfigurationDeliveryMethodReplicateRecordsArgs{},
    		},
    		StartDate: pulumi.String("string"),
    	},
    	WorkspaceId:  pulumi.String("string"),
    	DefinitionId: pulumi.String("string"),
    	Name:         pulumi.String("string"),
    	SecretId:     pulumi.String("string"),
    })
    
    var sourceAzureBlobStorageResource = new SourceAzureBlobStorage("sourceAzureBlobStorageResource", SourceAzureBlobStorageArgs.builder()
        .configuration(SourceAzureBlobStorageConfigurationArgs.builder()
            .azureBlobStorageAccountName("string")
            .azureBlobStorageContainerName("string")
            .credentials(SourceAzureBlobStorageConfigurationCredentialsArgs.builder()
                .authenticateViaClientCredentials(SourceAzureBlobStorageConfigurationCredentialsAuthenticateViaClientCredentialsArgs.builder()
                    .appClientId("string")
                    .appClientSecret("string")
                    .appTenantId("string")
                    .build())
                .authenticateViaOauth2(SourceAzureBlobStorageConfigurationCredentialsAuthenticateViaOauth2Args.builder()
                    .clientId("string")
                    .clientSecret("string")
                    .refreshToken("string")
                    .tenantId("string")
                    .build())
                .authenticateViaStorageAccountKey(SourceAzureBlobStorageConfigurationCredentialsAuthenticateViaStorageAccountKeyArgs.builder()
                    .azureBlobStorageAccountKey("string")
                    .build())
                .build())
            .streams(SourceAzureBlobStorageConfigurationStreamArgs.builder()
                .format(SourceAzureBlobStorageConfigurationStreamFormatArgs.builder()
                    .avroFormat(SourceAzureBlobStorageConfigurationStreamFormatAvroFormatArgs.builder()
                        .doubleAsString(false)
                        .build())
                    .csvFormat(SourceAzureBlobStorageConfigurationStreamFormatCsvFormatArgs.builder()
                        .delimiter("string")
                        .doubleQuote(false)
                        .encoding("string")
                        .escapeChar("string")
                        .falseValues("string")
                        .headerDefinition(SourceAzureBlobStorageConfigurationStreamFormatCsvFormatHeaderDefinitionArgs.builder()
                            .autogenerated(SourceAzureBlobStorageConfigurationStreamFormatCsvFormatHeaderDefinitionAutogeneratedArgs.builder()
                                .build())
                            .fromCsv(SourceAzureBlobStorageConfigurationStreamFormatCsvFormatHeaderDefinitionFromCsvArgs.builder()
                                .build())
                            .userProvided(SourceAzureBlobStorageConfigurationStreamFormatCsvFormatHeaderDefinitionUserProvidedArgs.builder()
                                .columnNames("string")
                                .build())
                            .build())
                        .ignoreErrorsOnFieldsMismatch(false)
                        .inferenceType("string")
                        .nullValues("string")
                        .quoteChar("string")
                        .skipRowsAfterHeader(0.0)
                        .skipRowsBeforeHeader(0.0)
                        .stringsCanBeNull(false)
                        .trueValues("string")
                        .build())
                    .excelFormat(SourceAzureBlobStorageConfigurationStreamFormatExcelFormatArgs.builder()
                        .build())
                    .jsonlFormat(SourceAzureBlobStorageConfigurationStreamFormatJsonlFormatArgs.builder()
                        .build())
                    .parquetFormat(SourceAzureBlobStorageConfigurationStreamFormatParquetFormatArgs.builder()
                        .decimalAsFloat(false)
                        .build())
                    .unstructuredDocumentFormat(SourceAzureBlobStorageConfigurationStreamFormatUnstructuredDocumentFormatArgs.builder()
                        .processing(SourceAzureBlobStorageConfigurationStreamFormatUnstructuredDocumentFormatProcessingArgs.builder()
                            .local(SourceAzureBlobStorageConfigurationStreamFormatUnstructuredDocumentFormatProcessingLocalArgs.builder()
                                .build())
                            .build())
                        .skipUnprocessableFiles(false)
                        .strategy("string")
                        .build())
                    .build())
                .name("string")
                .daysToSyncIfHistoryIsFull(0.0)
                .globs("string")
                .inputSchema("string")
                .legacyPrefix("string")
                .primaryKey("string")
                .recentNFilesToReadForSchemaDiscovery(0.0)
                .schemaless(false)
                .useFirstFoundFileForSchemaDiscovery(false)
                .validationPolicy("string")
                .build())
            .azureBlobStorageEndpoint("string")
            .deliveryMethod(SourceAzureBlobStorageConfigurationDeliveryMethodArgs.builder()
                .copyRawFiles(SourceAzureBlobStorageConfigurationDeliveryMethodCopyRawFilesArgs.builder()
                    .preserveDirectoryStructure(false)
                    .build())
                .replicateRecords(SourceAzureBlobStorageConfigurationDeliveryMethodReplicateRecordsArgs.builder()
                    .build())
                .build())
            .startDate("string")
            .build())
        .workspaceId("string")
        .definitionId("string")
        .name("string")
        .secretId("string")
        .build());
    
    source_azure_blob_storage_resource = airbyte.SourceAzureBlobStorage("sourceAzureBlobStorageResource",
        configuration={
            "azure_blob_storage_account_name": "string",
            "azure_blob_storage_container_name": "string",
            "credentials": {
                "authenticate_via_client_credentials": {
                    "app_client_id": "string",
                    "app_client_secret": "string",
                    "app_tenant_id": "string",
                },
                "authenticate_via_oauth2": {
                    "client_id": "string",
                    "client_secret": "string",
                    "refresh_token": "string",
                    "tenant_id": "string",
                },
                "authenticate_via_storage_account_key": {
                    "azure_blob_storage_account_key": "string",
                },
            },
            "streams": [{
                "format": {
                    "avro_format": {
                        "double_as_string": False,
                    },
                    "csv_format": {
                        "delimiter": "string",
                        "double_quote": False,
                        "encoding": "string",
                        "escape_char": "string",
                        "false_values": ["string"],
                        "header_definition": {
                            "autogenerated": {},
                            "from_csv": {},
                            "user_provided": {
                                "column_names": ["string"],
                            },
                        },
                        "ignore_errors_on_fields_mismatch": False,
                        "inference_type": "string",
                        "null_values": ["string"],
                        "quote_char": "string",
                        "skip_rows_after_header": 0,
                        "skip_rows_before_header": 0,
                        "strings_can_be_null": False,
                        "true_values": ["string"],
                    },
                    "excel_format": {},
                    "jsonl_format": {},
                    "parquet_format": {
                        "decimal_as_float": False,
                    },
                    "unstructured_document_format": {
                        "processing": {
                            "local": {},
                        },
                        "skip_unprocessable_files": False,
                        "strategy": "string",
                    },
                },
                "name": "string",
                "days_to_sync_if_history_is_full": 0,
                "globs": ["string"],
                "input_schema": "string",
                "legacy_prefix": "string",
                "primary_key": "string",
                "recent_n_files_to_read_for_schema_discovery": 0,
                "schemaless": False,
                "use_first_found_file_for_schema_discovery": False,
                "validation_policy": "string",
            }],
            "azure_blob_storage_endpoint": "string",
            "delivery_method": {
                "copy_raw_files": {
                    "preserve_directory_structure": False,
                },
                "replicate_records": {},
            },
            "start_date": "string",
        },
        workspace_id="string",
        definition_id="string",
        name="string",
        secret_id="string")
    
    const sourceAzureBlobStorageResource = new airbyte.SourceAzureBlobStorage("sourceAzureBlobStorageResource", {
        configuration: {
            azureBlobStorageAccountName: "string",
            azureBlobStorageContainerName: "string",
            credentials: {
                authenticateViaClientCredentials: {
                    appClientId: "string",
                    appClientSecret: "string",
                    appTenantId: "string",
                },
                authenticateViaOauth2: {
                    clientId: "string",
                    clientSecret: "string",
                    refreshToken: "string",
                    tenantId: "string",
                },
                authenticateViaStorageAccountKey: {
                    azureBlobStorageAccountKey: "string",
                },
            },
            streams: [{
                format: {
                    avroFormat: {
                        doubleAsString: false,
                    },
                    csvFormat: {
                        delimiter: "string",
                        doubleQuote: false,
                        encoding: "string",
                        escapeChar: "string",
                        falseValues: ["string"],
                        headerDefinition: {
                            autogenerated: {},
                            fromCsv: {},
                            userProvided: {
                                columnNames: ["string"],
                            },
                        },
                        ignoreErrorsOnFieldsMismatch: false,
                        inferenceType: "string",
                        nullValues: ["string"],
                        quoteChar: "string",
                        skipRowsAfterHeader: 0,
                        skipRowsBeforeHeader: 0,
                        stringsCanBeNull: false,
                        trueValues: ["string"],
                    },
                    excelFormat: {},
                    jsonlFormat: {},
                    parquetFormat: {
                        decimalAsFloat: false,
                    },
                    unstructuredDocumentFormat: {
                        processing: {
                            local: {},
                        },
                        skipUnprocessableFiles: false,
                        strategy: "string",
                    },
                },
                name: "string",
                daysToSyncIfHistoryIsFull: 0,
                globs: ["string"],
                inputSchema: "string",
                legacyPrefix: "string",
                primaryKey: "string",
                recentNFilesToReadForSchemaDiscovery: 0,
                schemaless: false,
                useFirstFoundFileForSchemaDiscovery: false,
                validationPolicy: "string",
            }],
            azureBlobStorageEndpoint: "string",
            deliveryMethod: {
                copyRawFiles: {
                    preserveDirectoryStructure: false,
                },
                replicateRecords: {},
            },
            startDate: "string",
        },
        workspaceId: "string",
        definitionId: "string",
        name: "string",
        secretId: "string",
    });
    
    type: airbyte:SourceAzureBlobStorage
    properties:
        configuration:
            azureBlobStorageAccountName: string
            azureBlobStorageContainerName: string
            azureBlobStorageEndpoint: string
            credentials:
                authenticateViaClientCredentials:
                    appClientId: string
                    appClientSecret: string
                    appTenantId: string
                authenticateViaOauth2:
                    clientId: string
                    clientSecret: string
                    refreshToken: string
                    tenantId: string
                authenticateViaStorageAccountKey:
                    azureBlobStorageAccountKey: string
            deliveryMethod:
                copyRawFiles:
                    preserveDirectoryStructure: false
                replicateRecords: {}
            startDate: string
            streams:
                - daysToSyncIfHistoryIsFull: 0
                  format:
                    avroFormat:
                        doubleAsString: false
                    csvFormat:
                        delimiter: string
                        doubleQuote: false
                        encoding: string
                        escapeChar: string
                        falseValues:
                            - string
                        headerDefinition:
                            autogenerated: {}
                            fromCsv: {}
                            userProvided:
                                columnNames:
                                    - string
                        ignoreErrorsOnFieldsMismatch: false
                        inferenceType: string
                        nullValues:
                            - string
                        quoteChar: string
                        skipRowsAfterHeader: 0
                        skipRowsBeforeHeader: 0
                        stringsCanBeNull: false
                        trueValues:
                            - string
                    excelFormat: {}
                    jsonlFormat: {}
                    parquetFormat:
                        decimalAsFloat: false
                    unstructuredDocumentFormat:
                        processing:
                            local: {}
                        skipUnprocessableFiles: false
                        strategy: string
                  globs:
                    - string
                  inputSchema: string
                  legacyPrefix: string
                  name: string
                  primaryKey: string
                  recentNFilesToReadForSchemaDiscovery: 0
                  schemaless: false
                  useFirstFoundFileForSchemaDiscovery: false
                  validationPolicy: string
        definitionId: string
        name: string
        secretId: string
        workspaceId: string
    

    SourceAzureBlobStorage Resource Properties

    To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

    Inputs

    In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
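
    For example, the credentials object can be written either way. A short sketch with placeholder values; the argument class names are assumed from the provider's naming pattern shown in the other languages on this page:

    import pulumi_airbyte as airbyte

    # Option 1: typed argument classes (names assumed from the generated SDK's pattern)
    creds = airbyte.SourceAzureBlobStorageConfigurationCredentialsArgs(
        authenticate_via_storage_account_key=airbyte.SourceAzureBlobStorageConfigurationCredentialsAuthenticateViaStorageAccountKeyArgs(
            azure_blob_storage_account_key="<account-key>",
        ),
    )

    # Option 2: an equivalent dictionary literal with snake_case keys,
    # as used in the Example Usage above
    creds_dict = {
        "authenticate_via_storage_account_key": {
            "azure_blob_storage_account_key": "<account-key>",
        },
    }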

    The SourceAzureBlobStorage resource accepts the following input properties:

    Configuration SourceAzureBlobStorageConfiguration
    NOTE: When this Spec is changed, legacyconfigtransformer.py must also be modified to uptake the changes because it is responsible for converting legacy Azure Blob Storage v0 configs into v1 configs using the File-Based CDK.
    WorkspaceId string
    DefinitionId string
    The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. Default: "fdaaba68-4875-4ed9-8fcd-4ae1e0a25093"; Requires replacement if changed.
    Name string
    Name of the source e.g. dev-mysql-instance.
    SecretId string
    Optional secretID obtained through the public API OAuth redirect flow. Requires replacement if changed.
    Configuration SourceAzureBlobStorageConfigurationArgs
    NOTE: When this Spec is changed, legacyconfigtransformer.py must also be modified to uptake the changes because it is responsible for converting legacy Azure Blob Storage v0 configs into v1 configs using the File-Based CDK.
    WorkspaceId string
    DefinitionId string
    The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. Default: "fdaaba68-4875-4ed9-8fcd-4ae1e0a25093"; Requires replacement if changed.
    Name string
    Name of the source e.g. dev-mysql-instance.
    SecretId string
    Optional secretID obtained through the public API OAuth redirect flow. Requires replacement if changed.
    configuration SourceAzureBlobStorageConfiguration
    NOTE: When this Spec is changed, legacyconfigtransformer.py must also be modified to uptake the changes because it is responsible for converting legacy Azure Blob Storage v0 configs into v1 configs using the File-Based CDK.
    workspaceId String
    definitionId String
    The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. Default: "fdaaba68-4875-4ed9-8fcd-4ae1e0a25093"; Requires replacement if changed.
    name String
    Name of the source e.g. dev-mysql-instance.
    secretId String
    Optional secretID obtained through the public API OAuth redirect flow. Requires replacement if changed.
    configuration SourceAzureBlobStorageConfiguration
    NOTE: When this Spec is changed, legacyconfigtransformer.py must also be modified to uptake the changes because it is responsible for converting legacy Azure Blob Storage v0 configs into v1 configs using the File-Based CDK.
    workspaceId string
    definitionId string
    The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. Default: "fdaaba68-4875-4ed9-8fcd-4ae1e0a25093"; Requires replacement if changed.
    name string
    Name of the source e.g. dev-mysql-instance.
    secretId string
    Optional secretID obtained through the public API OAuth redirect flow. Requires replacement if changed.
    configuration SourceAzureBlobStorageConfigurationArgs
    NOTE: When this Spec is changed, legacyconfigtransformer.py must also be modified to uptake the changes because it is responsible for converting legacy Azure Blob Storage v0 configs into v1 configs using the File-Based CDK.
    workspace_id str
    definition_id str
    The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. Default: "fdaaba68-4875-4ed9-8fcd-4ae1e0a25093"; Requires replacement if changed.
    name str
    Name of the source e.g. dev-mysql-instance.
    secret_id str
    Optional secretID obtained through the public API OAuth redirect flow. Requires replacement if changed.
    configuration Property Map
    NOTE: When this Spec is changed, legacyconfigtransformer.py must also be modified to uptake the changes because it is responsible for converting legacy Azure Blob Storage v0 configs into v1 configs using the File-Based CDK.
    workspaceId String
    definitionId String
    The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. Default: "fdaaba68-4875-4ed9-8fcd-4ae1e0a25093"; Requires replacement if changed.
    name String
    Name of the source e.g. dev-mysql-instance.
    secretId String
    Optional secretID obtained through the public API OAuth redirect flow. Requires replacement if changed.
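
    Of the inputs above, only configuration and workspaceId carry no optional qualifier. A minimal sketch in Python, using placeholder values and the dictionary form of the configuration; definition_id, name, and secret_id are optional and omitted here:

    import pulumi_airbyte as airbyte

    # Only the required inputs are set; all values are placeholders.
    minimal = airbyte.SourceAzureBlobStorage("minimal",
        configuration={
            "azure_blob_storage_account_name": "myaccount",
            "azure_blob_storage_container_name": "mycontainer",
            "credentials": {
                "authenticate_via_storage_account_key": {
                    "azure_blob_storage_account_key": "<account-key>",
                },
            },
            "streams": [{
                "name": "events",
                "format": {"jsonl_format": {}},
            }],
        },
        workspace_id="00000000-0000-0000-0000-000000000000")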

    Outputs

    All input properties are implicitly available as output properties. Additionally, the SourceAzureBlobStorage resource produces the following output properties:

    CreatedAt double
    Id string
    The provider-assigned unique ID for this managed resource.
    ResourceAllocation SourceAzureBlobStorageResourceAllocation
    Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition. It is overridden by the job type specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
    SourceId string
    SourceType string
    CreatedAt float64
    Id string
    The provider-assigned unique ID for this managed resource.
    ResourceAllocation SourceAzureBlobStorageResourceAllocation
    Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition. It is overridden by the job type specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
    SourceId string
    SourceType string
    createdAt Double
    id String
    The provider-assigned unique ID for this managed resource.
    resourceAllocation SourceAzureBlobStorageResourceAllocation
    Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition. It is overridden by the job type specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
    sourceId String
    sourceType String
    createdAt number
    id string
    The provider-assigned unique ID for this managed resource.
    resourceAllocation SourceAzureBlobStorageResourceAllocation
    Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition. It is overridden by the job type specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
    sourceId string
    sourceType string
    created_at float
    id str
    The provider-assigned unique ID for this managed resource.
    resource_allocation SourceAzureBlobStorageResourceAllocation
    Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition. It is overridden by the job type specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
    source_id str
    source_type str
    createdAt Number
    id String
    The provider-assigned unique ID for this managed resource.
    resourceAllocation Property Map
    Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition. It is overridden by the job type specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
    sourceId String
    sourceType String
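
    Output properties can be read and exported like any other Pulumi outputs. A short sketch, assuming the Python resource variable my_source_azureblobstorage created in the Example Usage section above:

    import pulumi

    # my_source_azureblobstorage is assumed to be the resource created in the
    # Example Usage section above.
    pulumi.export("sourceId", my_source_azureblobstorage.source_id)
    pulumi.export("sourceType", my_source_azureblobstorage.source_type)
    pulumi.export("createdAt", my_source_azureblobstorage.created_at)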

    Look up Existing SourceAzureBlobStorage Resource

    Get an existing SourceAzureBlobStorage resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

    public static get(name: string, id: Input<ID>, state?: SourceAzureBlobStorageState, opts?: CustomResourceOptions): SourceAzureBlobStorage
    @staticmethod
    def get(resource_name: str,
            id: str,
            opts: Optional[ResourceOptions] = None,
            configuration: Optional[SourceAzureBlobStorageConfigurationArgs] = None,
            created_at: Optional[float] = None,
            definition_id: Optional[str] = None,
            name: Optional[str] = None,
            resource_allocation: Optional[SourceAzureBlobStorageResourceAllocationArgs] = None,
            secret_id: Optional[str] = None,
            source_id: Optional[str] = None,
            source_type: Optional[str] = None,
            workspace_id: Optional[str] = None) -> SourceAzureBlobStorage
    func GetSourceAzureBlobStorage(ctx *Context, name string, id IDInput, state *SourceAzureBlobStorageState, opts ...ResourceOption) (*SourceAzureBlobStorage, error)
    public static SourceAzureBlobStorage Get(string name, Input<string> id, SourceAzureBlobStorageState? state, CustomResourceOptions? opts = null)
    public static SourceAzureBlobStorage get(String name, Output<String> id, SourceAzureBlobStorageState state, CustomResourceOptions options)
    resources:
      _:
        type: airbyte:SourceAzureBlobStorage
        get:
          id: ${id}
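
    For example, in Python ("<source-id>" is a placeholder for a real provider-assigned ID):

    import pulumi_airbyte as airbyte

    # Look up an existing source by its provider-assigned ID instead of creating it.
    existing = airbyte.SourceAzureBlobStorage.get("existingSourceAzureBlobStorage",
        "<source-id>")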
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    resource_name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    The following state arguments are supported:
    Configuration SourceAzureBlobStorageConfiguration
    NOTE: When this Spec is changed, legacyconfigtransformer.py must also be modified to uptake the changes because it is responsible for converting legacy Azure Blob Storage v0 configs into v1 configs using the File-Based CDK.
    CreatedAt double
    DefinitionId string
    The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. Default: "fdaaba68-4875-4ed9-8fcd-4ae1e0a25093"; Requires replacement if changed.
    Name string
    Name of the source e.g. dev-mysql-instance.
    ResourceAllocation SourceAzureBlobStorageResourceAllocation
    Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition. It is overridden by the job type specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
    SecretId string
    Optional secretID obtained through the public API OAuth redirect flow. Requires replacement if changed.
    SourceId string
    SourceType string
    WorkspaceId string
    Configuration SourceAzureBlobStorageConfigurationArgs
    NOTE: When this Spec is changed, legacyconfigtransformer.py must also be modified to uptake the changes because it is responsible for converting legacy Azure Blob Storage v0 configs into v1 configs using the File-Based CDK.
    CreatedAt float64
    DefinitionId string
    The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. Default: "fdaaba68-4875-4ed9-8fcd-4ae1e0a25093"; Requires replacement if changed.
    Name string
    Name of the source e.g. dev-mysql-instance.
    ResourceAllocation SourceAzureBlobStorageResourceAllocationArgs
    Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition. It is overridden by the job type specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
    SecretId string
    Optional secretID obtained through the public API OAuth redirect flow. Requires replacement if changed.
    SourceId string
    SourceType string
    WorkspaceId string
    configuration SourceAzureBlobStorageConfiguration
    NOTE: When this Spec is changed, legacyconfigtransformer.py must also be modified to pick up the changes, because it is responsible for converting legacy Azure Blob Storage v0 configs into v1 configs using the File-Based CDK.
    createdAt Double
    definitionId String
    The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. Default: "fdaaba68-4875-4ed9-8fcd-4ae1e0a25093"; Requires replacement if changed.
    name String
    Name of the source e.g. dev-mysql-instance.
    resourceAllocation SourceAzureBlobStorageResourceAllocation
    Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition. They are overridden by the job type specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
    secretId String
    Optional secretID obtained through the public API OAuth redirect flow. Requires replacement if changed.
    sourceId String
    sourceType String
    workspaceId String
    configuration SourceAzureBlobStorageConfiguration
    NOTE: When this Spec is changed, legacyconfigtransformer.py must also be modified to pick up the changes, because it is responsible for converting legacy Azure Blob Storage v0 configs into v1 configs using the File-Based CDK.
    createdAt number
    definitionId string
    The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. Default: "fdaaba68-4875-4ed9-8fcd-4ae1e0a25093"; Requires replacement if changed.
    name string
    Name of the source e.g. dev-mysql-instance.
    resourceAllocation SourceAzureBlobStorageResourceAllocation
    Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition. They are overridden by the job type specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
    secretId string
    Optional secretID obtained through the public API OAuth redirect flow. Requires replacement if changed.
    sourceId string
    sourceType string
    workspaceId string
    configuration SourceAzureBlobStorageConfigurationArgs
    NOTE: When this Spec is changed, legacyconfigtransformer.py must also be modified to pick up the changes, because it is responsible for converting legacy Azure Blob Storage v0 configs into v1 configs using the File-Based CDK.
    created_at float
    definition_id str
    The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. Default: "fdaaba68-4875-4ed9-8fcd-4ae1e0a25093"; Requires replacement if changed.
    name str
    Name of the source e.g. dev-mysql-instance.
    resource_allocation SourceAzureBlobStorageResourceAllocationArgs
    Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition. They are overridden by the job type specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
    secret_id str
    Optional secretID obtained through the public API OAuth redirect flow. Requires replacement if changed.
    source_id str
    source_type str
    workspace_id str
    configuration Property Map
    NOTE: When this Spec is changed, legacyconfigtransformer.py must also be modified to pick up the changes, because it is responsible for converting legacy Azure Blob Storage v0 configs into v1 configs using the File-Based CDK.
    createdAt Number
    definitionId String
    The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. Default: "fdaaba68-4875-4ed9-8fcd-4ae1e0a25093"; Requires replacement if changed.
    name String
    Name of the source e.g. dev-mysql-instance.
    resourceAllocation Property Map
    Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition. They are overridden by the job type specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
    secretId String
    Optional secretID obtained through the public API OAuth redirect flow. Requires replacement if changed.
    sourceId String
    sourceType String
    workspaceId String

    Supporting Types

    SourceAzureBlobStorageConfiguration, SourceAzureBlobStorageConfigurationArgs

    AzureBlobStorageAccountName string
    The name of the Azure Blob Storage account.
    AzureBlobStorageContainerName string
    The name of the Azure Blob Storage container.
    Credentials SourceAzureBlobStorageConfigurationCredentials
    Credentials for connecting to Azure Blob Storage.
    Streams List<SourceAzureBlobStorageConfigurationStream>
    Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their format, and how they should be parsed and validated. When sending data to a warehouse destination such as Snowflake or BigQuery, each stream is a separate table.
    AzureBlobStorageEndpoint string
    This is the Azure Blob Storage endpoint domain name. Leave the default value (or leave it empty if running the container from the command line) to use the native Microsoft endpoint shown in the example.
    DeliveryMethod SourceAzureBlobStorageConfigurationDeliveryMethod
    StartDate string
    UTC date and time in the format 2017-01-25T00:00:00.000000Z. Any file modified before this date will not be replicated.
    AzureBlobStorageAccountName string
    The name of the Azure Blob Storage account.
    AzureBlobStorageContainerName string
    The name of the Azure Blob Storage container.
    Credentials SourceAzureBlobStorageConfigurationCredentials
    Credentials for connecting to Azure Blob Storage.
    Streams []SourceAzureBlobStorageConfigurationStream
    Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their format, and how they should be parsed and validated. When sending data to a warehouse destination such as Snowflake or BigQuery, each stream is a separate table.
    AzureBlobStorageEndpoint string
    This is the Azure Blob Storage endpoint domain name. Leave the default value (or leave it empty if running the container from the command line) to use the native Microsoft endpoint shown in the example.
    DeliveryMethod SourceAzureBlobStorageConfigurationDeliveryMethod
    StartDate string
    UTC date and time in the format 2017-01-25T00:00:00.000000Z. Any file modified before this date will not be replicated.
    azureBlobStorageAccountName String
    The name of the Azure Blob Storage account.
    azureBlobStorageContainerName String
    The name of the Azure Blob Storage container.
    credentials SourceAzureBlobStorageConfigurationCredentials
    Credentials for connecting to Azure Blob Storage.
    streams List<SourceAzureBlobStorageConfigurationStream>
    Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their format, and how they should be parsed and validated. When sending data to a warehouse destination such as Snowflake or BigQuery, each stream is a separate table.
    azureBlobStorageEndpoint String
    This is the Azure Blob Storage endpoint domain name. Leave the default value (or leave it empty if running the container from the command line) to use the native Microsoft endpoint shown in the example.
    deliveryMethod SourceAzureBlobStorageConfigurationDeliveryMethod
    startDate String
    UTC date and time in the format 2017-01-25T00:00:00.000000Z. Any file modified before this date will not be replicated.
    azureBlobStorageAccountName string
    The name of the Azure Blob Storage account.
    azureBlobStorageContainerName string
    The name of the Azure Blob Storage container.
    credentials SourceAzureBlobStorageConfigurationCredentials
    Credentials for connecting to Azure Blob Storage.
    streams SourceAzureBlobStorageConfigurationStream[]
    Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their format, and how they should be parsed and validated. When sending data to a warehouse destination such as Snowflake or BigQuery, each stream is a separate table.
    azureBlobStorageEndpoint string
    This is the Azure Blob Storage endpoint domain name. Leave the default value (or leave it empty if running the container from the command line) to use the native Microsoft endpoint shown in the example.
    deliveryMethod SourceAzureBlobStorageConfigurationDeliveryMethod
    startDate string
    UTC date and time in the format 2017-01-25T00:00:00.000000Z. Any file modified before this date will not be replicated.
    azure_blob_storage_account_name str
    The name of the Azure Blob Storage account.
    azure_blob_storage_container_name str
    The name of the Azure Blob Storage container.
    credentials SourceAzureBlobStorageConfigurationCredentials
    Credentials for connecting to Azure Blob Storage.
    streams Sequence[SourceAzureBlobStorageConfigurationStream]
    Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their format, and how they should be parsed and validated. When sending data to a warehouse destination such as Snowflake or BigQuery, each stream is a separate table.
    azure_blob_storage_endpoint str
    This is the Azure Blob Storage endpoint domain name. Leave the default value (or leave it empty if running the container from the command line) to use the native Microsoft endpoint shown in the example.
    delivery_method SourceAzureBlobStorageConfigurationDeliveryMethod
    start_date str
    UTC date and time in the format 2017-01-25T00:00:00.000000Z. Any file modified before this date will not be replicated.
    azureBlobStorageAccountName String
    The name of the Azure Blob Storage account.
    azureBlobStorageContainerName String
    The name of the Azure Blob Storage container.
    credentials Property Map
    Credentials for connecting to Azure Blob Storage.
    streams List<Property Map>
    Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their format, and how they should be parsed and validated. When sending data to a warehouse destination such as Snowflake or BigQuery, each stream is a separate table.
    azureBlobStorageEndpoint String
    This is the Azure Blob Storage endpoint domain name. Leave the default value (or leave it empty if running the container from the command line) to use the native Microsoft endpoint shown in the example.
    deliveryMethod Property Map
    startDate String
    UTC date and time in the format 2017-01-25T00:00:00.000000Z. Any file modified before this date will not be replicated.
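
    Pulling only the required fields from the table above into one place, a minimal configuration sketch in TypeScript might look like the following; every string value is a hypothetical placeholder.

    import * as airbyte from "@pulumi/airbyte";

    // Only the required fields: account name, container name, credentials,
    // and at least one stream. All values are hypothetical placeholders.
    const minimalSource = new airbyte.SourceAzureBlobStorage("minimal_source", {
        configuration: {
            azureBlobStorageAccountName: "examplestorageaccount",
            azureBlobStorageContainerName: "examplecontainer",
            credentials: {
                authenticateViaStorageAccountKey: {
                    azureBlobStorageAccountKey: "<storage-account-key>",
                },
            },
            streams: [{
                name: "example_stream",
                format: {
                    jsonlFormat: {},
                },
            }],
        },
        workspaceId: "00000000-0000-0000-0000-000000000000",
    });
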

    SourceAzureBlobStorageConfigurationCredentials, SourceAzureBlobStorageConfigurationCredentialsArgs

    SourceAzureBlobStorageConfigurationCredentialsAuthenticateViaClientCredentials, SourceAzureBlobStorageConfigurationCredentialsAuthenticateViaClientCredentialsArgs

    AppClientId string
    Client ID of your Microsoft developer application
    AppClientSecret string
    Client Secret of your Microsoft developer application
    AppTenantId string
    Tenant ID of the Microsoft Azure Application
    AppClientId string
    Client ID of your Microsoft developer application
    AppClientSecret string
    Client Secret of your Microsoft developer application
    AppTenantId string
    Tenant ID of the Microsoft Azure Application
    appClientId String
    Client ID of your Microsoft developer application
    appClientSecret String
    Client Secret of your Microsoft developer application
    appTenantId String
    Tenant ID of the Microsoft Azure Application
    appClientId string
    Client ID of your Microsoft developer application
    appClientSecret string
    Client Secret of your Microsoft developer application
    appTenantId string
    Tenant ID of the Microsoft Azure Application
    app_client_id str
    Client ID of your Microsoft developer application
    app_client_secret str
    Client Secret of your Microsoft developer application
    app_tenant_id str
    Tenant ID of the Microsoft Azure Application
    appClientId String
    Client ID of your Microsoft developer application
    appClientSecret String
    Client Secret of your Microsoft developer application
    appTenantId String
    Tenant ID of the Microsoft Azure Application
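
    As a sketch, the client-credentials variant slots into the configuration's credentials block like this in TypeScript; all three values are hypothetical placeholders.

    // Authenticate with a Microsoft application's client credentials.
    // All three values are hypothetical placeholders.
    const credentials = {
        authenticateViaClientCredentials: {
            appClientId: "00000000-0000-0000-0000-000000000000",
            appClientSecret: "<app-client-secret>",
            appTenantId: "00000000-0000-0000-0000-000000000000",
        },
    };
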

    SourceAzureBlobStorageConfigurationCredentialsAuthenticateViaOauth2, SourceAzureBlobStorageConfigurationCredentialsAuthenticateViaOauth2Args

    ClientId string
    Client ID of your Microsoft developer application
    ClientSecret string
    Client Secret of your Microsoft developer application
    RefreshToken string
    Refresh Token of your Microsoft developer application
    TenantId string
    Tenant ID of the Microsoft Azure Application user
    ClientId string
    Client ID of your Microsoft developer application
    ClientSecret string
    Client Secret of your Microsoft developer application
    RefreshToken string
    Refresh Token of your Microsoft developer application
    TenantId string
    Tenant ID of the Microsoft Azure Application user
    clientId String
    Client ID of your Microsoft developer application
    clientSecret String
    Client Secret of your Microsoft developer application
    refreshToken String
    Refresh Token of your Microsoft developer application
    tenantId String
    Tenant ID of the Microsoft Azure Application user
    clientId string
    Client ID of your Microsoft developer application
    clientSecret string
    Client Secret of your Microsoft developer application
    refreshToken string
    Refresh Token of your Microsoft developer application
    tenantId string
    Tenant ID of the Microsoft Azure Application user
    client_id str
    Client ID of your Microsoft developer application
    client_secret str
    Client Secret of your Microsoft developer application
    refresh_token str
    Refresh Token of your Microsoft developer application
    tenant_id str
    Tenant ID of the Microsoft Azure Application user
    clientId String
    Client ID of your Microsoft developer application
    clientSecret String
    Client Secret of your Microsoft developer application
    refreshToken String
    Refresh Token of your Microsoft developer application
    tenantId String
    Tenant ID of the Microsoft Azure Application user
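
    A comparable sketch for the OAuth2 variant; all four values are hypothetical placeholders.

    // Authenticate via OAuth2 using a previously obtained refresh token.
    // All four values are hypothetical placeholders.
    const credentials = {
        authenticateViaOauth2: {
            clientId: "00000000-0000-0000-0000-000000000000",
            clientSecret: "<client-secret>",
            refreshToken: "<refresh-token>",
            tenantId: "00000000-0000-0000-0000-000000000000",
        },
    };
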

    SourceAzureBlobStorageConfigurationCredentialsAuthenticateViaStorageAccountKey, SourceAzureBlobStorageConfigurationCredentialsAuthenticateViaStorageAccountKeyArgs

    AzureBlobStorageAccountKey string
    The Azure Blob Storage account key.
    AzureBlobStorageAccountKey string
    The Azure Blob Storage account key.
    azureBlobStorageAccountKey String
    The Azure Blob Storage account key.
    azureBlobStorageAccountKey string
    The Azure Blob Storage account key.
    azure_blob_storage_account_key str
    The Azure Blob Storage account key.
    azureBlobStorageAccountKey String
    The Azure Blob Storage account key.

    SourceAzureBlobStorageConfigurationDeliveryMethod, SourceAzureBlobStorageConfigurationDeliveryMethodArgs

    CopyRawFiles SourceAzureBlobStorageConfigurationDeliveryMethodCopyRawFiles
    Copy raw files without parsing their contents. Bits are copied into the destination exactly as they appeared in the source. Recommended for use with unstructured text data, non-text and compressed files.
    ReplicateRecords SourceAzureBlobStorageConfigurationDeliveryMethodReplicateRecords
    Recommended - Extract and load structured records into your destination of choice. This is the classic method of moving data in Airbyte. It allows for blocking and hashing individual fields or files from a structured schema. Data can be flattened, typed and deduped depending on the destination.
    CopyRawFiles SourceAzureBlobStorageConfigurationDeliveryMethodCopyRawFiles
    Copy raw files without parsing their contents. Bits are copied into the destination exactly as they appeared in the source. Recommended for use with unstructured text data, non-text and compressed files.
    ReplicateRecords SourceAzureBlobStorageConfigurationDeliveryMethodReplicateRecords
    Recommended - Extract and load structured records into your destination of choice. This is the classic method of moving data in Airbyte. It allows for blocking and hashing individual fields or files from a structured schema. Data can be flattened, typed and deduped depending on the destination.
    copyRawFiles SourceAzureBlobStorageConfigurationDeliveryMethodCopyRawFiles
    Copy raw files without parsing their contents. Bits are copied into the destination exactly as they appeared in the source. Recommended for use with unstructured text data, non-text and compressed files.
    replicateRecords SourceAzureBlobStorageConfigurationDeliveryMethodReplicateRecords
    Recommended - Extract and load structured records into your destination of choice. This is the classic method of moving data in Airbyte. It allows for blocking and hashing individual fields or files from a structured schema. Data can be flattened, typed and deduped depending on the destination.
    copyRawFiles SourceAzureBlobStorageConfigurationDeliveryMethodCopyRawFiles
    Copy raw files without parsing their contents. Bits are copied into the destination exactly as they appeared in the source. Recommended for use with unstructured text data, non-text and compressed files.
    replicateRecords SourceAzureBlobStorageConfigurationDeliveryMethodReplicateRecords
    Recommended - Extract and load structured records into your destination of choice. This is the classic method of moving data in Airbyte. It allows for blocking and hashing individual fields or files from a structured schema. Data can be flattened, typed and deduped depending on the destination.
    copy_raw_files SourceAzureBlobStorageConfigurationDeliveryMethodCopyRawFiles
    Copy raw files without parsing their contents. Bits are copied into the destination exactly as they appeared in the source. Recommended for use with unstructured text data, non-text and compressed files.
    replicate_records SourceAzureBlobStorageConfigurationDeliveryMethodReplicateRecords
    Recommended - Extract and load structured records into your destination of choice. This is the classic method of moving data in Airbyte. It allows for blocking and hashing individual fields or files from a structured schema. Data can be flattened, typed and deduped depending on the destination.
    copyRawFiles Property Map
    Copy raw files without parsing their contents. Bits are copied into the destination exactly as they appeared in the source. Recommended for use with unstructured text data, non-text and compressed files.
    replicateRecords Property Map
    Recommended - Extract and load structured records into your destination of choice. This is the classic method of moving data in Airbyte. It allows for blocking and hashing individual fields or files from a structured schema. Data can be flattened, typed and deduped depending on the destination.
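
    Only one of the two options should be set on the delivery method; a hedged sketch of each in TypeScript:

    // Structured records (recommended): parse files and load typed records.
    const replicate = {
        replicateRecords: {},
    };

    // Raw files: copy bytes as-is, optionally keeping subdirectory paths.
    const copyRaw = {
        copyRawFiles: {
            preserveDirectoryStructure: true, // default: true
        },
    };
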

    SourceAzureBlobStorageConfigurationDeliveryMethodCopyRawFiles, SourceAzureBlobStorageConfigurationDeliveryMethodCopyRawFilesArgs

    PreserveDirectoryStructure bool
    If enabled, sends subdirectory folder structure along with source file names to the destination. Otherwise, files will be synced by their names only. This option is ignored when file-based replication is not enabled. Default: true
    PreserveDirectoryStructure bool
    If enabled, sends subdirectory folder structure along with source file names to the destination. Otherwise, files will be synced by their names only. This option is ignored when file-based replication is not enabled. Default: true
    preserveDirectoryStructure Boolean
    If enabled, sends subdirectory folder structure along with source file names to the destination. Otherwise, files will be synced by their names only. This option is ignored when file-based replication is not enabled. Default: true
    preserveDirectoryStructure boolean
    If enabled, sends subdirectory folder structure along with source file names to the destination. Otherwise, files will be synced by their names only. This option is ignored when file-based replication is not enabled. Default: true
    preserve_directory_structure bool
    If enabled, sends subdirectory folder structure along with source file names to the destination. Otherwise, files will be synced by their names only. This option is ignored when file-based replication is not enabled. Default: true
    preserveDirectoryStructure Boolean
    If enabled, sends subdirectory folder structure along with source file names to the destination. Otherwise, files will be synced by their names only. This option is ignored when file-based replication is not enabled. Default: true

    SourceAzureBlobStorageConfigurationStream, SourceAzureBlobStorageConfigurationStreamArgs

    Format SourceAzureBlobStorageConfigurationStreamFormat
    The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.
    Name string
    The name of the stream.
    DaysToSyncIfHistoryIsFull double
    When the state history of the file store is full, syncs will only read files that were last modified in the provided day range. Default: 3
    Globs List<string>
    The pattern used to specify which files should be selected from the file system. For more information on glob pattern matching, refer to the glob documentation. Default: ["**"]
    InputSchema string
    The schema that will be used to validate records extracted from the file. This will override the stream schema that is auto-detected from incoming files.
    LegacyPrefix string
    The path prefix configured in v3 versions of the S3 connector. This option is deprecated in favor of a single glob.
    PrimaryKey string
    The column or columns (for a composite key) that serve as the unique identifier of a record. If empty, the primary key will default to the parser's default primary key.
    RecentNFilesToReadForSchemaDiscovery double
    The number of recent files that will be used to discover the schema for this stream.
    Schemaless bool
    When enabled, syncs will not validate or structure records against the stream's schema. Default: false
    UseFirstFoundFileForSchemaDiscovery bool
    When enabled, the source will use the first found file for schema discovery. This helps avoid a long discovery step. Default: false
    ValidationPolicy string
    The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema. Default: "Emit Record"; must be one of ["Emit Record", "Skip Record", "Wait for Discover"]
    Format SourceAzureBlobStorageConfigurationStreamFormat
    The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.
    Name string
    The name of the stream.
    DaysToSyncIfHistoryIsFull float64
    When the state history of the file store is full, syncs will only read files that were last modified in the provided day range. Default: 3
    Globs []string
    The pattern used to specify which files should be selected from the file system. For more information on glob pattern matching, refer to the glob documentation. Default: ["**"]
    InputSchema string
    The schema that will be used to validate records extracted from the file. This will override the stream schema that is auto-detected from incoming files.
    LegacyPrefix string
    The path prefix configured in v3 versions of the S3 connector. This option is deprecated in favor of a single glob.
    PrimaryKey string
    The column or columns (for a composite key) that serve as the unique identifier of a record. If empty, the primary key will default to the parser's default primary key.
    RecentNFilesToReadForSchemaDiscovery float64
    The number of recent files that will be used to discover the schema for this stream.
    Schemaless bool
    When enabled, syncs will not validate or structure records against the stream's schema. Default: false
    UseFirstFoundFileForSchemaDiscovery bool
    When enabled, the source will use the first found file for schema discovery. This helps avoid a long discovery step. Default: false
    ValidationPolicy string
    The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema. Default: "Emit Record"; must be one of ["Emit Record", "Skip Record", "Wait for Discover"]
    format SourceAzureBlobStorageConfigurationStreamFormat
    The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.
    name String
    The name of the stream.
    daysToSyncIfHistoryIsFull Double
    When the state history of the file store is full, syncs will only read files that were last modified in the provided day range. Default: 3
    globs List<String>
    The pattern used to specify which files should be selected from the file system. For more information on glob pattern matching, refer to the glob documentation. Default: ["**"]
    inputSchema String
    The schema that will be used to validate records extracted from the file. This will override the stream schema that is auto-detected from incoming files.
    legacyPrefix String
    The path prefix configured in v3 versions of the S3 connector. This option is deprecated in favor of a single glob.
    primaryKey String
    The column or columns (for a composite key) that serve as the unique identifier of a record. If empty, the primary key will default to the parser's default primary key.
    recentNFilesToReadForSchemaDiscovery Double
    The number of recent files that will be used to discover the schema for this stream.
    schemaless Boolean
    When enabled, syncs will not validate or structure records against the stream's schema. Default: false
    useFirstFoundFileForSchemaDiscovery Boolean
    When enabled, the source will use the first found file for schema discovery. This helps avoid a long discovery step. Default: false
    validationPolicy String
    The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema. Default: "Emit Record"; must be one of ["Emit Record", "Skip Record", "Wait for Discover"]
    format SourceAzureBlobStorageConfigurationStreamFormat
    The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.
    name string
    The name of the stream.
    daysToSyncIfHistoryIsFull number
    When the state history of the file store is full, syncs will only read files that were last modified in the provided day range. Default: 3
    globs string[]
    The pattern used to specify which files should be selected from the file system. For more information on glob pattern matching, refer to the glob documentation. Default: ["**"]
    inputSchema string
    The schema that will be used to validate records extracted from the file. This will override the stream schema that is auto-detected from incoming files.
    legacyPrefix string
    The path prefix configured in v3 versions of the S3 connector. This option is deprecated in favor of a single glob.
    primaryKey string
    The column or columns (for a composite key) that serve as the unique identifier of a record. If empty, the primary key will default to the parser's default primary key.
    recentNFilesToReadForSchemaDiscovery number
    The number of recent files that will be used to discover the schema for this stream.
    schemaless boolean
    When enabled, syncs will not validate or structure records against the stream's schema. Default: false
    useFirstFoundFileForSchemaDiscovery boolean
    When enabled, the source will use the first found file for schema discovery. This helps avoid a long discovery step. Default: false
    validationPolicy string
    The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema. Default: "Emit Record"; must be one of ["Emit Record", "Skip Record", "Wait for Discover"]
    format SourceAzureBlobStorageConfigurationStreamFormat
    The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.
    name str
    The name of the stream.
    days_to_sync_if_history_is_full float
    When the state history of the file store is full, syncs will only read files that were last modified in the provided day range. Default: 3
    globs Sequence[str]
    The pattern used to specify which files should be selected from the file system. For more information on glob pattern matching, refer to the glob documentation. Default: ["**"]
    input_schema str
    The schema that will be used to validate records extracted from the file. This will override the stream schema that is auto-detected from incoming files.
    legacy_prefix str
    The path prefix configured in v3 versions of the S3 connector. This option is deprecated in favor of a single glob.
    primary_key str
    The column or columns (for a composite key) that serve as the unique identifier of a record. If empty, the primary key will default to the parser's default primary key.
    recent_n_files_to_read_for_schema_discovery float
    The number of recent files that will be used to discover the schema for this stream.
    schemaless bool
    When enabled, syncs will not validate or structure records against the stream's schema. Default: false
    use_first_found_file_for_schema_discovery bool
    When enabled, the source will use the first found file for schema discovery. This helps avoid a long discovery step. Default: false
    validation_policy str
    The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema. Default: "Emit Record"; must be one of ["Emit Record", "Skip Record", "Wait for Discover"]
    format Property Map
    The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.
    name String
    The name of the stream.
    daysToSyncIfHistoryIsFull Number
    When the state history of the file store is full, syncs will only read files that were last modified in the provided day range. Default: 3
    globs List<String>
    The pattern used to specify which files should be selected from the file system. For more information on glob pattern matching, refer to the glob documentation. Default: ["**"]
    inputSchema String
    The schema that will be used to validate records extracted from the file. This will override the stream schema that is auto-detected from incoming files.
    legacyPrefix String
    The path prefix configured in v3 versions of the S3 connector. This option is deprecated in favor of a single glob.
    primaryKey String
    The column or columns (for a composite key) that serve as the unique identifier of a record. If empty, the primary key will default to the parser's default primary key.
    recentNFilesToReadForSchemaDiscovery Number
    The number of recent files that will be used to discover the schema for this stream.
    schemaless Boolean
    When enabled, syncs will not validate or structure records against the stream's schema. Default: false
    useFirstFoundFileForSchemaDiscovery Boolean
    When enabled, the source will use the first found file for schema discovery. This helps avoid a long discovery step. Default: false
    validationPolicy String
    The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema. Default: "Emit Record"; must be one of ["Emit Record", "Skip Record", "Wait for Discover"]
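
    Tying the stream options together, a hedged sketch of a single CSV stream in TypeScript; the stream name and glob pattern are hypothetical.

    // One stream that reads CSV files under a hypothetical "reports/" prefix.
    const reportsStream = {
        name: "reports",
        globs: ["reports/**/*.csv"],      // hypothetical glob pattern
        format: {
            csvFormat: {},                // parse matching files as CSV
        },
        daysToSyncIfHistoryIsFull: 3,     // default
        validationPolicy: "Emit Record",  // default policy
    };
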

    SourceAzureBlobStorageConfigurationStreamFormat, SourceAzureBlobStorageConfigurationStreamFormatArgs

    SourceAzureBlobStorageConfigurationStreamFormatAvroFormat, SourceAzureBlobStorageConfigurationStreamFormatAvroFormatArgs

    DoubleAsString bool
    Whether to convert double fields to strings. This is recommended if you have decimal numbers with a high degree of precision, because there can be a loss of precision when handling floating point numbers. Default: false
    DoubleAsString bool
    Whether to convert double fields to strings. This is recommended if you have decimal numbers with a high degree of precision, because there can be a loss of precision when handling floating point numbers. Default: false
    doubleAsString Boolean
    Whether to convert double fields to strings. This is recommended if you have decimal numbers with a high degree of precision, because there can be a loss of precision when handling floating point numbers. Default: false
    doubleAsString boolean
    Whether to convert double fields to strings. This is recommended if you have decimal numbers with a high degree of precision, because there can be a loss of precision when handling floating point numbers. Default: false
    double_as_string bool
    Whether to convert double fields to strings. This is recommended if you have decimal numbers with a high degree of precision, because there can be a loss of precision when handling floating point numbers. Default: false
    doubleAsString Boolean
    Whether to convert double fields to strings. This is recommended if you have decimal numbers with a high degree of precision, because there can be a loss of precision when handling floating point numbers. Default: false

    SourceAzureBlobStorageConfigurationStreamFormatCsvFormat, SourceAzureBlobStorageConfigurationStreamFormatCsvFormatArgs

    Delimiter string
    The character delimiting individual cells in the CSV data. This may only be a 1-character string. For tab-delimited data enter '\t'. Default: ","
    DoubleQuote bool
    Whether two quotes in a quoted CSV value denote a single quote in the data. Default: true
    Encoding string
    The character encoding of the CSV data. Leave blank to default to UTF8. See the list of Python encodings for allowable options. Default: "utf8"
    EscapeChar string
    The character used for escaping special characters. To disallow escaping, leave this field blank.
    FalseValues List<string>
    A set of case-sensitive strings that should be interpreted as false values. Default: ["n","no","f","false","off","0"]
    HeaderDefinition SourceAzureBlobStorageConfigurationStreamFormatCsvFormatHeaderDefinition
    How headers will be defined. User Provided assumes the CSV does not have a header row and uses the headers provided; Autogenerated assumes the CSV does not have a header row and the CDK will generate headers using f{i}, where i is the index starting from 0. Otherwise, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV that has headers, they can skip rows.
    IgnoreErrorsOnFieldsMismatch bool
    Whether to ignore errors that occur when the number of fields in the CSV does not match the number of columns in the schema. Default: false
    InferenceType string
    How to infer the types of the columns. If none, inference defaults to strings. Must be one of ["None", "Primitive Types Only"]
    NullValues List<string>
    A set of case-sensitive strings that should be interpreted as null values. For example, if the value 'NA' should be interpreted as null, enter 'NA' in this field. Default: []
    QuoteChar string
    The character used for quoting CSV values. To disallow quoting, make this field blank. Default: """
    SkipRowsAfterHeader double
    The number of rows to skip after the header row. Default: 0
    SkipRowsBeforeHeader double
    The number of rows to skip before the header row. For example, if the header row is on the 3rd row, enter 2 in this field. Default: 0
    StringsCanBeNull bool
    Whether strings can be interpreted as null values. If true, strings that match the set of null values will be interpreted as null. If false, they will be interpreted as the string itself. Default: true
    TrueValues List<string>
    A set of case-sensitive strings that should be interpreted as true values. Default: ["y","yes","t","true","on","1"]
    Delimiter string
    The character delimiting individual cells in the CSV data. This may only be a 1-character string. For tab-delimited data enter '\t'. Default: ","
    DoubleQuote bool
    Whether two quotes in a quoted CSV value denote a single quote in the data. Default: true
    Encoding string
    The character encoding of the CSV data. Leave blank to default to UTF8. See the list of Python encodings for allowable options. Default: "utf8"
    EscapeChar string
    The character used for escaping special characters. To disallow escaping, leave this field blank.
    FalseValues []string
    A set of case-sensitive strings that should be interpreted as false values. Default: ["n","no","f","false","off","0"]
    HeaderDefinition SourceAzureBlobStorageConfigurationStreamFormatCsvFormatHeaderDefinition
    How headers will be defined. User Provided assumes the CSV does not have a header row and uses the headers provided; Autogenerated assumes the CSV does not have a header row and the CDK will generate headers using f{i}, where i is the index starting from 0. Otherwise, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV that has headers, they can skip rows.
    IgnoreErrorsOnFieldsMismatch bool
    Whether to ignore errors that occur when the number of fields in the CSV does not match the number of columns in the schema. Default: false
    InferenceType string
    How to infer the types of the columns. If none, inference defaults to strings. Must be one of ["None", "Primitive Types Only"]
    NullValues []string
    A set of case-sensitive strings that should be interpreted as null values. For example, if the value 'NA' should be interpreted as null, enter 'NA' in this field. Default: []
    QuoteChar string
    The character used for quoting CSV values. To disallow quoting, make this field blank. Default: """
    SkipRowsAfterHeader float64
    The number of rows to skip after the header row. Default: 0
    SkipRowsBeforeHeader float64
    The number of rows to skip before the header row. For example, if the header row is on the 3rd row, enter 2 in this field. Default: 0
    StringsCanBeNull bool
    Whether strings can be interpreted as null values. If true, strings that match the set of null values will be interpreted as null. If false, they will be interpreted as the string itself. Default: true
    TrueValues []string
    A set of case-sensitive strings that should be interpreted as true values. Default: ["y","yes","t","true","on","1"]
    delimiter String
    The character delimiting individual cells in the CSV data. This may only be a 1-character string. For tab-delimited data enter '\t'. Default: ","
    doubleQuote Boolean
    Whether two quotes in a quoted CSV value denote a single quote in the data. Default: true
    encoding String
    The character encoding of the CSV data. Leave blank to default to UTF8. See the list of Python encodings for allowable options. Default: "utf8"
    escapeChar String
    The character used for escaping special characters. To disallow escaping, leave this field blank.
    falseValues List<String>
    A set of case-sensitive strings that should be interpreted as false values. Default: ["n","no","f","false","off","0"]
    headerDefinition SourceAzureBlobStorageConfigurationStreamFormatCsvFormatHeaderDefinition
    How headers will be defined. User Provided assumes the CSV does not have a header row and uses the headers provided; Autogenerated assumes the CSV does not have a header row and the CDK will generate headers using f{i}, where i is the index starting from 0. Otherwise, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV that has headers, they can skip rows.
    ignoreErrorsOnFieldsMismatch Boolean
    Whether to ignore errors that occur when the number of fields in the CSV does not match the number of columns in the schema. Default: false
    inferenceType String
    How to infer the types of the columns. If none, inference defaults to strings. Must be one of ["None", "Primitive Types Only"]
    nullValues List<String>
    A set of case-sensitive strings that should be interpreted as null values. For example, if the value 'NA' should be interpreted as null, enter 'NA' in this field. Default: []
    quoteChar String
    The character used for quoting CSV values. To disallow quoting, make this field blank. Default: """
    skipRowsAfterHeader Double
    The number of rows to skip after the header row. Default: 0
    skipRowsBeforeHeader Double
    The number of rows to skip before the header row. For example, if the header row is on the 3rd row, enter 2 in this field. Default: 0
    stringsCanBeNull Boolean
    Whether strings can be interpreted as null values. If true, strings that match the set of null values will be interpreted as null. If false, they will be interpreted as the string itself. Default: true
    trueValues List<String>
    A set of case-sensitive strings that should be interpreted as true values. Default: ["y","yes","t","true","on","1"]
    delimiter string
    The character delimiting individual cells in the CSV data. This may only be a 1-character string. For tab-delimited data enter '\t'. Default: ","
    doubleQuote boolean
    Whether two quotes in a quoted CSV value denote a single quote in the data. Default: true
    encoding string
    The character encoding of the CSV data. Leave blank to default to UTF8. See the list of Python encodings for allowable options. Default: "utf8"
    escapeChar string
    The character used for escaping special characters. To disallow escaping, leave this field blank.
    falseValues string[]
    A set of case-sensitive strings that should be interpreted as false values. Default: ["n","no","f","false","off","0"]
    headerDefinition SourceAzureBlobStorageConfigurationStreamFormatCsvFormatHeaderDefinition
    How headers will be defined. User Provided assumes the CSV does not have a header row and uses the headers provided; Autogenerated assumes the CSV does not have a header row and the CDK will generate headers using f{i}, where i is the index starting from 0. Otherwise, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV that has headers, they can skip rows.
    ignoreErrorsOnFieldsMismatch boolean
    Whether to ignore errors that occur when the number of fields in the CSV does not match the number of columns in the schema. Default: false
    inferenceType string
    How to infer the types of the columns. If none, inference defaults to strings. Must be one of ["None", "Primitive Types Only"]
    nullValues string[]
    A set of case-sensitive strings that should be interpreted as null values. For example, if the value 'NA' should be interpreted as null, enter 'NA' in this field. Default: []
    quoteChar string
    The character used for quoting CSV values. To disallow quoting, make this field blank. Default: """
    skipRowsAfterHeader number
    The number of rows to skip after the header row. Default: 0
    skipRowsBeforeHeader number
    The number of rows to skip before the header row. For example, if the header row is on the 3rd row, enter 2 in this field. Default: 0
    stringsCanBeNull boolean
    Whether strings can be interpreted as null values. If true, strings that match the set of null values will be interpreted as null. If false, they will be interpreted as the string itself. Default: true
    trueValues string[]
    A set of case-sensitive strings that should be interpreted as true values. Default: ["y","yes","t","true","on","1"]
    delimiter str
    The character delimiting individual cells in the CSV data. This may only be a 1-character string. For tab-delimited data enter '\t'. Default: ","
    double_quote bool
    Whether two quotes in a quoted CSV value denote a single quote in the data. Default: true
    encoding str
    The character encoding of the CSV data. Leave blank to default to UTF8. See the list of Python encodings for allowable options. Default: "utf8"
    escape_char str
    The character used for escaping special characters. To disallow escaping, leave this field blank.
    false_values Sequence[str]
    A set of case-sensitive strings that should be interpreted as false values. Default: ["n","no","f","false","off","0"]
    header_definition SourceAzureBlobStorageConfigurationStreamFormatCsvFormatHeaderDefinition
    How headers will be defined. User Provided assumes the CSV does not have a header row and uses the headers provided; Autogenerated assumes the CSV does not have a header row and the CDK will generate headers using f{i}, where i is the index starting from 0. Otherwise, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV that has headers, they can skip rows.
    ignore_errors_on_fields_mismatch bool
    Whether to ignore errors that occur when the number of fields in the CSV does not match the number of columns in the schema. Default: false
    inference_type str
    How to infer the types of the columns. If none, inference defaults to strings. Must be one of ["None", "Primitive Types Only"]
    null_values Sequence[str]
    A set of case-sensitive strings that should be interpreted as null values. For example, if the value 'NA' should be interpreted as null, enter 'NA' in this field. Default: []
    quote_char str
    The character used for quoting CSV values. To disallow quoting, make this field blank. Default: """
    skip_rows_after_header float
    The number of rows to skip after the header row. Default: 0
    skip_rows_before_header float
    The number of rows to skip before the header row. For example, if the header row is on the 3rd row, enter 2 in this field. Default: 0
    strings_can_be_null bool
    Whether strings can be interpreted as null values. If true, strings that match the set of null values will be interpreted as null. If false, they will be interpreted as the string itself. Default: true
    true_values Sequence[str]
    A set of case-sensitive strings that should be interpreted as true values. Default: ["y","yes","t","true","on","1"]
    delimiter String
    The character delimiting individual cells in the CSV data. This may only be a 1-character string. For tab-delimited data enter '\t'. Default: ","
    doubleQuote Boolean
    Whether two quotes in a quoted CSV value denote a single quote in the data. Default: true
    encoding String
    The character encoding of the CSV data. Leave blank to default to UTF8. See the list of Python encodings for allowable options. Default: "utf8"
    escapeChar String
    The character used for escaping special characters. To disallow escaping, leave this field blank.
    falseValues List<String>
    A set of case-sensitive strings that should be interpreted as false values. Default: ["n","no","f","false","off","0"]
    headerDefinition Property Map
    How headers will be defined. User Provided assumes the CSV does not have a header row and uses the headers provided; Autogenerated assumes the CSV does not have a header row and the CDK will generate headers using f{i}, where i is the index starting from 0. Otherwise, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV that has headers, they can skip rows.
    ignoreErrorsOnFieldsMismatch Boolean
    Whether to ignore errors that occur when the number of fields in the CSV does not match the number of columns in the schema. Default: false
    inferenceType String
    How to infer the types of the columns. If none, inference defaults to strings. Must be one of ["None", "Primitive Types Only"]
    nullValues List<String>
    A set of case-sensitive strings that should be interpreted as null values. For example, if the value 'NA' should be interpreted as null, enter 'NA' in this field. Default: []
    quoteChar String
    The character used for quoting CSV values. To disallow quoting, make this field blank. Default: """
    skipRowsAfterHeader Number
    The number of rows to skip after the header row. Default: 0
    skipRowsBeforeHeader Number
    The number of rows to skip before the header row. For example, if the header row is on the 3rd row, enter 2 in this field. Default: 0
    stringsCanBeNull Boolean
    Whether strings can be interpreted as null values. If true, strings that match the set of null values will be interpreted as null. If false, they will be interpreted as the string itself. Default: true
    trueValues List<String>
    A set of case-sensitive strings that should be interpreted as true values. Default: ["y","yes","t","true","on","1"]
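
    For example, a hedged sketch of CSV options for tab-delimited files whose header sits on the third row; the values are illustrative, not defaults.

    // Tab-delimited CSV with a two-row preamble before the header row.
    const csvFormat = {
        delimiter: "\t",                       // tab-delimited cells
        skipRowsBeforeHeader: 2,               // header row is on the 3rd row
        nullValues: ["NA", "N/A"],             // interpret these strings as null
        inferenceType: "Primitive Types Only", // infer primitive column types
    };
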

    SourceAzureBlobStorageConfigurationStreamFormatCsvFormatHeaderDefinition, SourceAzureBlobStorageConfigurationStreamFormatCsvFormatHeaderDefinitionArgs

    SourceAzureBlobStorageConfigurationStreamFormatCsvFormatHeaderDefinitionUserProvided, SourceAzureBlobStorageConfigurationStreamFormatCsvFormatHeaderDefinitionUserProvidedArgs

    ColumnNames List<string>
    The column names that will be used while emitting the CSV records
    ColumnNames []string
    The column names that will be used while emitting the CSV records
    columnNames List<String>
    The column names that will be used while emitting the CSV records
    columnNames string[]
    The column names that will be used while emitting the CSV records
    column_names Sequence[str]
    The column names that will be used while emitting the CSV records
    columnNames List<String>
    The column names that will be used while emitting the CSV records
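
    A sketch of supplying explicit column names for headerless CSVs; the userProvided key and the column names are assumptions based on the type name above.

    // Provide column names for CSV files that have no header row.
    const headerDefinition = {
        userProvided: {
            columnNames: ["id", "name", "created_at"], // hypothetical columns
        },
    };
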

    SourceAzureBlobStorageConfigurationStreamFormatParquetFormat, SourceAzureBlobStorageConfigurationStreamFormatParquetFormatArgs

    DecimalAsFloat bool
    Whether to convert decimal fields to floats. There is a loss of precision when converting decimals to floats, so this is not recommended. Default: false
    DecimalAsFloat bool
    Whether to convert decimal fields to floats. There is a loss of precision when converting decimals to floats, so this is not recommended. Default: false
    decimalAsFloat Boolean
    Whether to convert decimal fields to floats. There is a loss of precision when converting decimals to floats, so this is not recommended. Default: false
    decimalAsFloat boolean
    Whether to convert decimal fields to floats. There is a loss of precision when converting decimals to floats, so this is not recommended. Default: false
    decimal_as_float bool
    Whether to convert decimal fields to floats. There is a loss of precision when converting decimals to floats, so this is not recommended. Default: false
    decimalAsFloat Boolean
    Whether to convert decimal fields to floats. There is a loss of precision when converting decimals to floats, so this is not recommended. Default: false
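
    As a one-line sketch, keeping the default preserves decimal precision:

    // Leave decimals as decimals; setting this to true converts them to
    // floats and loses precision, which the docs advise against.
    const parquetFormat = { decimalAsFloat: false };
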

    SourceAzureBlobStorageConfigurationStreamFormatUnstructuredDocumentFormat, SourceAzureBlobStorageConfigurationStreamFormatUnstructuredDocumentFormatArgs

    Processing SourceAzureBlobStorageConfigurationStreamFormatUnstructuredDocumentFormatProcessing
    Processing configuration
    SkipUnprocessableFiles bool
    If true, skip files that cannot be parsed and pass the error message along as the _ab_source_file_parse_error field. If false, fail the sync. Default: true
    Strategy string
    The strategy used to parse documents. fast extracts text directly from the document, which doesn't work for all files. ocr_only is more reliable, but slower. hi_res is the most reliable, but requires an API key and a hosted instance of unstructured and can't be used with local mode. See the unstructured.io documentation for more details: https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf. Default: "auto"; must be one of ["auto", "fast", "ocr_only", "hi_res"]
    Processing SourceAzureBlobStorageConfigurationStreamFormatUnstructuredDocumentFormatProcessing
    Processing configuration
    SkipUnprocessableFiles bool
    If true, skip files that cannot be parsed and pass the error message along as the _ab_source_file_parse_error field. If false, fail the sync. Default: true
    Strategy string
    The strategy used to parse documents. fast extracts text directly from the document, which doesn't work for all files. ocr_only is more reliable, but slower. hi_res is the most reliable, but requires an API key and a hosted instance of unstructured and can't be used with local mode. See the unstructured.io documentation for more details: https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf. Default: "auto"; must be one of ["auto", "fast", "ocr_only", "hi_res"]
    processing SourceAzureBlobStorageConfigurationStreamFormatUnstructuredDocumentFormatProcessing
    Processing configuration
    skipUnprocessableFiles Boolean
    If true, skip files that cannot be parsed and pass the error message along as the _ab_source_file_parse_error field. If false, fail the sync. Default: true
    strategy String
    The strategy used to parse documents. fast extracts text directly from the document, which doesn't work for all files. ocr_only is more reliable, but slower. hi_res is the most reliable, but requires an API key and a hosted instance of unstructured and can't be used with local mode. See the unstructured.io documentation for more details: https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf. Default: "auto"; must be one of ["auto", "fast", "ocr_only", "hi_res"]
    processing SourceAzureBlobStorageConfigurationStreamFormatUnstructuredDocumentFormatProcessing
    Processing configuration
    skipUnprocessableFiles boolean
    If true, skip files that cannot be parsed and pass the error message along as the _ab_source_file_parse_error field. If false, fail the sync. Default: true
    strategy string
    The strategy used to parse documents. fast extracts text directly from the document, which doesn't work for all files. ocr_only is more reliable, but slower. hi_res is the most reliable, but requires an API key and a hosted instance of unstructured and can't be used with local mode. See the unstructured.io documentation for more details: https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf. Default: "auto"; must be one of ["auto", "fast", "ocr_only", "hi_res"]
    processing SourceAzureBlobStorageConfigurationStreamFormatUnstructuredDocumentFormatProcessing
    Processing configuration
    skip_unprocessable_files bool
    If true, skip files that cannot be parsed and pass the error message along as the _ab_source_file_parse_error field. If false, fail the sync. Default: true
    strategy str
    The strategy used to parse documents. fast extracts text directly from the document, which doesn't work for all files. ocr_only is more reliable, but slower. hi_res is the most reliable, but requires an API key and a hosted instance of unstructured and can't be used with local mode. See the unstructured.io documentation for more details: https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf. Default: "auto"; must be one of ["auto", "fast", "ocr_only", "hi_res"]
    processing Property Map
    Processing configuration
    skipUnprocessableFiles Boolean
    If true, skip files that cannot be parsed and pass the error message along as the _ab_source_file_parse_error field. If false, fail the sync. Default: true
    strategy String
    The strategy used to parse documents. fast extracts text directly from the document, which doesn't work for all files. ocr_only is more reliable, but slower. hi_res is the most reliable, but requires an API key and a hosted instance of unstructured and can't be used with local mode. See the unstructured.io documentation for more details: https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf. Default: "auto"; must be one of ["auto", "fast", "ocr_only", "hi_res"]
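
    Combining these options, a hedged sketch of an unstructured-document format using local processing; the key names follow the types above.

    // Parse documents locally with the fast strategy; skip unparseable files
    // instead of failing the sync (the default behavior).
    const unstructuredDocumentFormat = {
        strategy: "fast",
        skipUnprocessableFiles: true,
        processing: {
            local: {}, // local processing is the default option
        },
    };
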

    SourceAzureBlobStorageConfigurationStreamFormatUnstructuredDocumentFormatProcessing, SourceAzureBlobStorageConfigurationStreamFormatUnstructuredDocumentFormatProcessingArgs

    Local SourceAzureBlobStorageConfigurationStreamFormatUnstructuredDocumentFormatProcessingLocal
    Process files locally, supporting fast and ocr modes. This is the default option.
    Local SourceAzureBlobStorageConfigurationStreamFormatUnstructuredDocumentFormatProcessingLocal
    Process files locally, supporting fast and ocr modes. This is the default option.
    local SourceAzureBlobStorageConfigurationStreamFormatUnstructuredDocumentFormatProcessingLocal
    Process files locally, supporting fast and ocr modes. This is the default option.
    local SourceAzureBlobStorageConfigurationStreamFormatUnstructuredDocumentFormatProcessingLocal
    Process files locally, supporting fast and ocr modes. This is the default option.
    local SourceAzureBlobStorageConfigurationStreamFormatUnstructuredDocumentFormatProcessingLocal
    Process files locally, supporting fast and ocr modes. This is the default option.
    local Property Map
    Process files locally, supporting fast and ocr modes. This is the default option.
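
    The strategy, processing, and skip options above compose inside a stream's format. Below is a minimal TypeScript sketch, assuming the format key is unstructuredDocumentFormat (consistent with the type names above); the account, container, stream, and workspace values are placeholders, not values from this page:

    import * as airbyte from "@pulumi/airbyte";

    // Sketch only: a stream that parses documents with the unstructured format.
    const docsSource = new airbyte.SourceAzureBlobStorage("docs_source", {
        configuration: {
            azureBlobStorageAccountName: "examplestorageaccount", // placeholder
            azureBlobStorageContainerName: "examplecontainer",    // placeholder
            credentials: {}, // fill in one of the credential options documented above
            streams: [{
                name: "docs_stream", // placeholder
                format: {
                    unstructuredDocumentFormat: {
                        // "fast" extracts text directly; "ocr_only" and "hi_res"
                        // trade speed for reliability, as described above.
                        strategy: "fast",
                        // Local processing supports the fast and ocr modes.
                        processing: { local: {} },
                        // Skip unparseable files instead of failing the sync.
                        skipUnprocessableFiles: true,
                    },
                },
            }],
        },
        workspaceId: "00000000-0000-0000-0000-000000000000", // placeholder
    });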

    SourceAzureBlobStorageResourceAllocation, SourceAzureBlobStorageResourceAllocationArgs

    Default SourceAzureBlobStorageResourceAllocationDefault
    Optional resource requirements to run workers (blank for unbounded allocations)
    JobSpecifics []SourceAzureBlobStorageResourceAllocationJobSpecific
    default SourceAzureBlobStorageResourceAllocationDefault
    Optional resource requirements to run workers (blank for unbounded allocations)
    jobSpecifics SourceAzureBlobStorageResourceAllocationJobSpecific[]
    default Property Map
    Optional resource requirements to run workers (blank for unbounded allocations)
    jobSpecifics List<Property Map>
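
    A hedged sketch of the allocation shape implied by these types, assuming the generated types.input module path of Pulumi TypeScript SDKs. The jobType value is illustrative (see the JobSpecific type below), and the fields of the Default and ResourceRequirements types are not enumerated on this page, so they are left as empty objects:

    import * as airbyte from "@pulumi/airbyte";

    // Sketch only: the nested shape of a resource allocation value.
    const exampleAllocation: airbyte.types.input.SourceAzureBlobStorageResourceAllocation = {
        default: {}, // resource requirements; fields not listed on this page
        jobSpecifics: [{
            jobType: "sync", // illustrative string; see the JobSpecific type below
            resourceRequirements: {}, // same shape as `default`
        }],
    };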

    SourceAzureBlobStorageResourceAllocationDefault, SourceAzureBlobStorageResourceAllocationDefaultArgs

    SourceAzureBlobStorageResourceAllocationJobSpecific, SourceAzureBlobStorageResourceAllocationJobSpecificArgs

    JobType string
    Enum that describes the different types of jobs that the platform runs.
    ResourceRequirements SourceAzureBlobStorageResourceAllocationJobSpecificResourceRequirements
    Optional resource requirements to run workers (blank for unbounded allocations)
    JobType string
    Enum that describes the different types of jobs that the platform runs.
    ResourceRequirements SourceAzureBlobStorageResourceAllocationJobSpecificResourceRequirements
    Optional resource requirements to run workers (blank for unbounded allocations)
    jobType String
    Enum that describes the different types of jobs that the platform runs.
    resourceRequirements SourceAzureBlobStorageResourceAllocationJobSpecificResourceRequirements
    Optional resource requirements to run workers (blank for unbounded allocations)
    jobType string
    Enum that describes the different types of jobs that the platform runs.
    resourceRequirements SourceAzureBlobStorageResourceAllocationJobSpecificResourceRequirements
    Optional resource requirements to run workers (blank for unbounded allocations)
    job_type str
    Enum that describes the different types of jobs that the platform runs.
    resource_requirements SourceAzureBlobStorageResourceAllocationJobSpecificResourceRequirements
    Optional resource requirements to run workers (blank for unbounded allocations)
    jobType String
    Enum that describes the different types of jobs that the platform runs.
    resourceRequirements Property Map
    Optional resource requirements to run workers (blank for unbounded allocations)
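
    On the resource instance, the allocation is available like any other property; whether it is user-settable may depend on the provider version. A sketch, assuming the docsSource resource from the earlier sketch:

    // Sketch only: surfacing the per-job-type allocation as stack outputs.
    export const allocation = docsSource.resourceAllocation;
    export const allocationJobTypes = docsSource.resourceAllocation.apply(alloc =>
        (alloc?.jobSpecifics ?? []).map(js => js.jobType),
    );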

    SourceAzureBlobStorageResourceAllocationJobSpecificResourceRequirements, SourceAzureBlobStorageResourceAllocationJobSpecificResourceRequirementsArgs

    Import

    In Terraform v1.5.0 and later, the import block can be used with the id attribute, for example:

    import {
      to = airbyte_source_azure_blob_storage.my_airbyte_source_azure_blob_storage
      id = "..."
    }

    The pulumi import command can be used, for example:

    $ pulumi import airbyte:index/sourceAzureBlobStorage:SourceAzureBlobStorage my_airbyte_source_azure_blob_storage "..."
    

    To learn more about importing existing cloud resources, see Importing resources.

    Package Details

    Repository
    airbyte airbytehq/terraform-provider-airbyte
    License
    Notes
    This Pulumi package is based on the airbyte Terraform Provider.