azure.streamanalytics.OutputBlob

We recommend using Azure Native.

Azure Classic v5.72.0 published on Monday, Apr 15, 2024 by Pulumi

    Manages a Stream Analytics Output to Blob Storage.

    Example Usage

    import * as pulumi from "@pulumi/pulumi";
    import * as azure from "@pulumi/azure";
    
    const exampleResourceGroup = new azure.core.ResourceGroup("example", {
        name: "rg-example",
        location: "West Europe",
    });
    const example = azure.streamanalytics.getJobOutput({
        name: "example-job",
        resourceGroupName: exampleResourceGroup.name,
    });
    const exampleAccount = new azure.storage.Account("example", {
        name: "examplesa",
        resourceGroupName: exampleResourceGroup.name,
        location: exampleResourceGroup.location,
        accountTier: "Standard",
        accountReplicationType: "LRS",
    });
    const exampleContainer = new azure.storage.Container("example", {
        name: "example",
        storageAccountName: exampleAccount.name,
        containerAccessType: "private",
    });
    const exampleOutputBlob = new azure.streamanalytics.OutputBlob("example", {
        name: "output-to-blob-storage",
        streamAnalyticsJobName: example.apply(example => example.name),
        resourceGroupName: example.apply(example => example.resourceGroupName),
        storageAccountName: exampleAccount.name,
        storageAccountKey: exampleAccount.primaryAccessKey,
        storageContainerName: exampleContainer.name,
        pathPattern: "some-pattern",
        dateFormat: "yyyy-MM-dd",
        timeFormat: "HH",
        serialization: {
            type: "Csv",
            encoding: "UTF8",
            fieldDelimiter: ",",
        },
    });
    
    import pulumi
    import pulumi_azure as azure
    
    example_resource_group = azure.core.ResourceGroup("example",
        name="rg-example",
        location="West Europe")
    example = azure.streamanalytics.get_job_output(name="example-job",
        resource_group_name=example_resource_group.name)
    example_account = azure.storage.Account("example",
        name="examplesa",
        resource_group_name=example_resource_group.name,
        location=example_resource_group.location,
        account_tier="Standard",
        account_replication_type="LRS")
    example_container = azure.storage.Container("example",
        name="example",
        storage_account_name=example_account.name,
        container_access_type="private")
    example_output_blob = azure.streamanalytics.OutputBlob("example",
        name="output-to-blob-storage",
        stream_analytics_job_name=example.name,
        resource_group_name=example.resource_group_name,
        storage_account_name=example_account.name,
        storage_account_key=example_account.primary_access_key,
        storage_container_name=example_container.name,
        path_pattern="some-pattern",
        date_format="yyyy-MM-dd",
        time_format="HH",
        serialization=azure.streamanalytics.OutputBlobSerializationArgs(
            type="Csv",
            encoding="UTF8",
            field_delimiter=",",
        ))
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-azure/sdk/v5/go/azure/core"
    	"github.com/pulumi/pulumi-azure/sdk/v5/go/azure/storage"
    	"github.com/pulumi/pulumi-azure/sdk/v5/go/azure/streamanalytics"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		exampleResourceGroup, err := core.NewResourceGroup(ctx, "example", &core.ResourceGroupArgs{
    			Name:     pulumi.String("rg-example"),
    			Location: pulumi.String("West Europe"),
    		})
    		if err != nil {
    			return err
    		}
    		example := streamanalytics.LookupJobOutput(ctx, streamanalytics.GetJobOutputArgs{
    			Name:              pulumi.String("example-job"),
    			ResourceGroupName: exampleResourceGroup.Name,
    		}, nil)
    		exampleAccount, err := storage.NewAccount(ctx, "example", &storage.AccountArgs{
    			Name:                   pulumi.String("examplesa"),
    			ResourceGroupName:      exampleResourceGroup.Name,
    			Location:               exampleResourceGroup.Location,
    			AccountTier:            pulumi.String("Standard"),
    			AccountReplicationType: pulumi.String("LRS"),
    		})
    		if err != nil {
    			return err
    		}
    		exampleContainer, err := storage.NewContainer(ctx, "example", &storage.ContainerArgs{
    			Name:                pulumi.String("example"),
    			StorageAccountName:  exampleAccount.Name,
    			ContainerAccessType: pulumi.String("private"),
    		})
    		if err != nil {
    			return err
    		}
    		_, err = streamanalytics.NewOutputBlob(ctx, "example", &streamanalytics.OutputBlobArgs{
    			Name: pulumi.String("output-to-blob-storage"),
    			StreamAnalyticsJobName: example.ApplyT(func(example streamanalytics.GetJobResult) (*string, error) {
    				return &example.Name, nil
    			}).(pulumi.StringPtrOutput),
    			ResourceGroupName: example.ApplyT(func(example streamanalytics.GetJobResult) (*string, error) {
    				return &example.ResourceGroupName, nil
    			}).(pulumi.StringPtrOutput),
    			StorageAccountName:   exampleAccount.Name,
    			StorageAccountKey:    exampleAccount.PrimaryAccessKey,
    			StorageContainerName: exampleContainer.Name,
    			PathPattern:          pulumi.String("some-pattern"),
    			DateFormat:           pulumi.String("yyyy-MM-dd"),
    			TimeFormat:           pulumi.String("HH"),
    			Serialization: &streamanalytics.OutputBlobSerializationArgs{
    				Type:           pulumi.String("Csv"),
    				Encoding:       pulumi.String("UTF8"),
    				FieldDelimiter: pulumi.String(","),
    			},
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Azure = Pulumi.Azure;
    
    return await Deployment.RunAsync(() => 
    {
        var exampleResourceGroup = new Azure.Core.ResourceGroup("example", new()
        {
            Name = "rg-example",
            Location = "West Europe",
        });
    
        var example = Azure.StreamAnalytics.GetJob.Invoke(new()
        {
            Name = "example-job",
            ResourceGroupName = exampleResourceGroup.Name,
        });
    
        var exampleAccount = new Azure.Storage.Account("example", new()
        {
            Name = "examplesa",
            ResourceGroupName = exampleResourceGroup.Name,
            Location = exampleResourceGroup.Location,
            AccountTier = "Standard",
            AccountReplicationType = "LRS",
        });
    
        var exampleContainer = new Azure.Storage.Container("example", new()
        {
            Name = "example",
            StorageAccountName = exampleAccount.Name,
            ContainerAccessType = "private",
        });
    
        var exampleOutputBlob = new Azure.StreamAnalytics.OutputBlob("example", new()
        {
            Name = "output-to-blob-storage",
            StreamAnalyticsJobName = example.Apply(getJobResult => getJobResult.Name),
            ResourceGroupName = example.Apply(getJobResult => getJobResult.ResourceGroupName),
            StorageAccountName = exampleAccount.Name,
            StorageAccountKey = exampleAccount.PrimaryAccessKey,
            StorageContainerName = exampleContainer.Name,
            PathPattern = "some-pattern",
            DateFormat = "yyyy-MM-dd",
            TimeFormat = "HH",
            Serialization = new Azure.StreamAnalytics.Inputs.OutputBlobSerializationArgs
            {
                Type = "Csv",
                Encoding = "UTF8",
                FieldDelimiter = ",",
            },
        });
    
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.azure.core.ResourceGroup;
    import com.pulumi.azure.core.ResourceGroupArgs;
    import com.pulumi.azure.streamanalytics.StreamanalyticsFunctions;
    import com.pulumi.azure.streamanalytics.inputs.GetJobArgs;
    import com.pulumi.azure.storage.Account;
    import com.pulumi.azure.storage.AccountArgs;
    import com.pulumi.azure.storage.Container;
    import com.pulumi.azure.storage.ContainerArgs;
    import com.pulumi.azure.streamanalytics.OutputBlob;
    import com.pulumi.azure.streamanalytics.OutputBlobArgs;
    import com.pulumi.azure.streamanalytics.inputs.OutputBlobSerializationArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var exampleResourceGroup = new ResourceGroup("exampleResourceGroup", ResourceGroupArgs.builder()        
                .name("rg-example")
                .location("West Europe")
                .build());
    
            final var example = StreamanalyticsFunctions.getJob(GetJobArgs.builder()
                .name("example-job")
                .resourceGroupName(exampleResourceGroup.name())
                .build());
    
            var exampleAccount = new Account("exampleAccount", AccountArgs.builder()        
                .name("examplesa")
                .resourceGroupName(exampleResourceGroup.name())
                .location(exampleResourceGroup.location())
                .accountTier("Standard")
                .accountReplicationType("LRS")
                .build());
    
            var exampleContainer = new Container("exampleContainer", ContainerArgs.builder()        
                .name("example")
                .storageAccountName(exampleAccount.name())
                .containerAccessType("private")
                .build());
    
            var exampleOutputBlob = new OutputBlob("exampleOutputBlob", OutputBlobArgs.builder()        
                .name("output-to-blob-storage")
            .streamAnalyticsJobName(example.applyValue(getJobResult -> getJobResult.name()))
            .resourceGroupName(example.applyValue(getJobResult -> getJobResult.resourceGroupName()))
                .storageAccountName(exampleAccount.name())
                .storageAccountKey(exampleAccount.primaryAccessKey())
                .storageContainerName(exampleContainer.name())
                .pathPattern("some-pattern")
                .dateFormat("yyyy-MM-dd")
                .timeFormat("HH")
                .serialization(OutputBlobSerializationArgs.builder()
                    .type("Csv")
                    .encoding("UTF8")
                    .fieldDelimiter(",")
                    .build())
                .build());
    
        }
    }
    
    resources:
      exampleResourceGroup:
        type: azure:core:ResourceGroup
        name: example
        properties:
          name: rg-example
          location: West Europe
      exampleAccount:
        type: azure:storage:Account
        name: example
        properties:
          name: examplesa
          resourceGroupName: ${exampleResourceGroup.name}
          location: ${exampleResourceGroup.location}
          accountTier: Standard
          accountReplicationType: LRS
      exampleContainer:
        type: azure:storage:Container
        name: example
        properties:
          name: example
          storageAccountName: ${exampleAccount.name}
          containerAccessType: private
      exampleOutputBlob:
        type: azure:streamanalytics:OutputBlob
        name: example
        properties:
          name: output-to-blob-storage
          streamAnalyticsJobName: ${example.name}
          resourceGroupName: ${example.resourceGroupName}
          storageAccountName: ${exampleAccount.name}
          storageAccountKey: ${exampleAccount.primaryAccessKey}
          storageContainerName: ${exampleContainer.name}
          pathPattern: some-pattern
          dateFormat: yyyy-MM-dd
          timeFormat: HH
          serialization:
            type: Csv
            encoding: UTF8
            fieldDelimiter: ','
    variables:
      example:
        fn::invoke:
          Function: azure:streamanalytics:getJob
          Arguments:
            name: example-job
            resourceGroupName: ${exampleResourceGroup.name}
    

    Create OutputBlob Resource

    Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.

    Constructor syntax

    new OutputBlob(name: string, args: OutputBlobArgs, opts?: CustomResourceOptions);
    @overload
    def OutputBlob(resource_name: str,
                   args: OutputBlobArgs,
                   opts: Optional[ResourceOptions] = None)
    
    @overload
    def OutputBlob(resource_name: str,
                   opts: Optional[ResourceOptions] = None,
                   serialization: Optional[OutputBlobSerializationArgs] = None,
                   time_format: Optional[str] = None,
                   stream_analytics_job_name: Optional[str] = None,
                   storage_container_name: Optional[str] = None,
                   date_format: Optional[str] = None,
                   storage_account_name: Optional[str] = None,
                   path_pattern: Optional[str] = None,
                   resource_group_name: Optional[str] = None,
                   blob_write_mode: Optional[str] = None,
                   storage_account_key: Optional[str] = None,
                   name: Optional[str] = None,
                   authentication_mode: Optional[str] = None,
                   batch_min_rows: Optional[int] = None,
                   batch_max_wait_time: Optional[str] = None)
    func NewOutputBlob(ctx *Context, name string, args OutputBlobArgs, opts ...ResourceOption) (*OutputBlob, error)
    public OutputBlob(string name, OutputBlobArgs args, CustomResourceOptions? opts = null)
    public OutputBlob(String name, OutputBlobArgs args)
    public OutputBlob(String name, OutputBlobArgs args, CustomResourceOptions options)
    
    type: azure:streamanalytics:OutputBlob
    properties: # The arguments to resource properties.
    options: # Bag of options to control resource's behavior.
    
    

    Parameters

    name string
    The unique name of the resource.
    args OutputBlobArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    resource_name str
    The unique name of the resource.
    args OutputBlobArgs
    The arguments to resource properties.
    opts ResourceOptions
    Bag of options to control resource's behavior.
    ctx Context
    Context object for the current deployment.
    name string
    The unique name of the resource.
    args OutputBlobArgs
    The arguments to resource properties.
    opts ResourceOption
    Bag of options to control resource's behavior.
    name string
    The unique name of the resource.
    args OutputBlobArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    name String
    The unique name of the resource.
    args OutputBlobArgs
    The arguments to resource properties.
    options CustomResourceOptions
    Bag of options to control resource's behavior.

    Example

    The following reference example uses placeholder values for all input properties.

    var outputBlobResource = new Azure.StreamAnalytics.OutputBlob("outputBlobResource", new()
    {
        Serialization = new Azure.StreamAnalytics.Inputs.OutputBlobSerializationArgs
        {
            Type = "string",
            Encoding = "string",
            FieldDelimiter = "string",
            Format = "string",
        },
        TimeFormat = "string",
        StreamAnalyticsJobName = "string",
        StorageContainerName = "string",
        DateFormat = "string",
        StorageAccountName = "string",
        PathPattern = "string",
        ResourceGroupName = "string",
        BlobWriteMode = "string",
        StorageAccountKey = "string",
        Name = "string",
        AuthenticationMode = "string",
        BatchMinRows = 0,
        BatchMaxWaitTime = "string",
    });
    
    example, err := streamanalytics.NewOutputBlob(ctx, "outputBlobResource", &streamanalytics.OutputBlobArgs{
    	Serialization: &streamanalytics.OutputBlobSerializationArgs{
    		Type:           pulumi.String("string"),
    		Encoding:       pulumi.String("string"),
    		FieldDelimiter: pulumi.String("string"),
    		Format:         pulumi.String("string"),
    	},
    	TimeFormat:             pulumi.String("string"),
    	StreamAnalyticsJobName: pulumi.String("string"),
    	StorageContainerName:   pulumi.String("string"),
    	DateFormat:             pulumi.String("string"),
    	StorageAccountName:     pulumi.String("string"),
    	PathPattern:            pulumi.String("string"),
    	ResourceGroupName:      pulumi.String("string"),
    	BlobWriteMode:          pulumi.String("string"),
    	StorageAccountKey:      pulumi.String("string"),
    	Name:                   pulumi.String("string"),
    	AuthenticationMode:     pulumi.String("string"),
    	BatchMinRows:           pulumi.Int(0),
    	BatchMaxWaitTime:       pulumi.String("string"),
    })
    
    var outputBlobResource = new OutputBlob("outputBlobResource", OutputBlobArgs.builder()        
        .serialization(OutputBlobSerializationArgs.builder()
            .type("string")
            .encoding("string")
            .fieldDelimiter("string")
            .format("string")
            .build())
        .timeFormat("string")
        .streamAnalyticsJobName("string")
        .storageContainerName("string")
        .dateFormat("string")
        .storageAccountName("string")
        .pathPattern("string")
        .resourceGroupName("string")
        .blobWriteMode("string")
        .storageAccountKey("string")
        .name("string")
        .authenticationMode("string")
        .batchMinRows(0)
        .batchMaxWaitTime("string")
        .build());
    
    output_blob_resource = azure.streamanalytics.OutputBlob("outputBlobResource",
        serialization=azure.streamanalytics.OutputBlobSerializationArgs(
            type="string",
            encoding="string",
            field_delimiter="string",
            format="string",
        ),
        time_format="string",
        stream_analytics_job_name="string",
        storage_container_name="string",
        date_format="string",
        storage_account_name="string",
        path_pattern="string",
        resource_group_name="string",
        blob_write_mode="string",
        storage_account_key="string",
        name="string",
        authentication_mode="string",
        batch_min_rows=0,
        batch_max_wait_time="string")
    
    const outputBlobResource = new azure.streamanalytics.OutputBlob("outputBlobResource", {
        serialization: {
            type: "string",
            encoding: "string",
            fieldDelimiter: "string",
            format: "string",
        },
        timeFormat: "string",
        streamAnalyticsJobName: "string",
        storageContainerName: "string",
        dateFormat: "string",
        storageAccountName: "string",
        pathPattern: "string",
        resourceGroupName: "string",
        blobWriteMode: "string",
        storageAccountKey: "string",
        name: "string",
        authenticationMode: "string",
        batchMinRows: 0,
        batchMaxWaitTime: "string",
    });
    
    type: azure:streamanalytics:OutputBlob
    properties:
        authenticationMode: string
        batchMaxWaitTime: string
        batchMinRows: 0
        blobWriteMode: string
        dateFormat: string
        name: string
        pathPattern: string
        resourceGroupName: string
        serialization:
            encoding: string
            fieldDelimiter: string
            format: string
            type: string
        storageAccountKey: string
        storageAccountName: string
        storageContainerName: string
        streamAnalyticsJobName: string
        timeFormat: string
    

    OutputBlob Resource Properties

    To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

    Inputs

    The OutputBlob resource accepts the following input properties:

    DateFormat string
    The date format. Wherever {date} appears in path_pattern, the value of this property is used as the date format instead.
    PathPattern string
    The blob path pattern. Not a regular expression. It represents a pattern against which blob names will be matched to determine whether or not they should be included as input or output to the job.
    ResourceGroupName string
    The name of the Resource Group where the Stream Analytics Job exists. Changing this forces a new resource to be created.
    Serialization OutputBlobSerialization
    A serialization block as defined below.
    StorageAccountName string
    The name of the Storage Account.
    StorageContainerName string
    The name of the Container within the Storage Account.
    StreamAnalyticsJobName string
    The name of the Stream Analytics Job. Changing this forces a new resource to be created.
    TimeFormat string
    The time format. Wherever {time} appears in path_pattern, the value of this property is used as the time format instead.
    AuthenticationMode string
    The authentication mode for the Stream Output. Possible values are Msi and ConnectionString. Defaults to ConnectionString.
    BatchMaxWaitTime string
    The maximum wait time per batch in hh:mm:ss e.g. 00:02:00 for two minutes.
    BatchMinRows int
    The minimum number of rows per batch (must be between 0 and 1000000).
    BlobWriteMode string
    Determines whether blob blocks are either committed automatically or appended. Possible values are Append and Once. Defaults to Append.
    Name string
    The name of the Stream Output. Changing this forces a new resource to be created.
    StorageAccountKey string
    The Access Key which should be used to connect to this Storage Account.
    DateFormat string
    The date format. Wherever {date} appears in path_pattern, the value of this property is used as the date format instead.
    PathPattern string
    The blob path pattern. Not a regular expression. It represents a pattern against which blob names will be matched to determine whether or not they should be included as input or output to the job.
    ResourceGroupName string
    The name of the Resource Group where the Stream Analytics Job exists. Changing this forces a new resource to be created.
    Serialization OutputBlobSerializationArgs
    A serialization block as defined below.
    StorageAccountName string
    The name of the Storage Account.
    StorageContainerName string
    The name of the Container within the Storage Account.
    StreamAnalyticsJobName string
    The name of the Stream Analytics Job. Changing this forces a new resource to be created.
    TimeFormat string
    The time format. Wherever {time} appears in path_pattern, the value of this property is used as the time format instead.
    AuthenticationMode string
    The authentication mode for the Stream Output. Possible values are Msi and ConnectionString. Defaults to ConnectionString.
    BatchMaxWaitTime string
    The maximum wait time per batch in hh:mm:ss e.g. 00:02:00 for two minutes.
    BatchMinRows int
    The minimum number of rows per batch (must be between 0 and 1000000).
    BlobWriteMode string
    Determines whether blob blocks are either committed automatically or appended. Possible values are Append and Once. Defaults to Append.
    Name string
    The name of the Stream Output. Changing this forces a new resource to be created.
    StorageAccountKey string
    The Access Key which should be used to connect to this Storage Account.
    dateFormat String
    The date format. Wherever {date} appears in path_pattern, the value of this property is used as the date format instead.
    pathPattern String
    The blob path pattern. Not a regular expression. It represents a pattern against which blob names will be matched to determine whether or not they should be included as input or output to the job.
    resourceGroupName String
    The name of the Resource Group where the Stream Analytics Job exists. Changing this forces a new resource to be created.
    serialization OutputBlobSerialization
    A serialization block as defined below.
    storageAccountName String
    The name of the Storage Account.
    storageContainerName String
    The name of the Container within the Storage Account.
    streamAnalyticsJobName String
    The name of the Stream Analytics Job. Changing this forces a new resource to be created.
    timeFormat String
    The time format. Wherever {time} appears in path_pattern, the value of this property is used as the time format instead.
    authenticationMode String
    The authentication mode for the Stream Output. Possible values are Msi and ConnectionString. Defaults to ConnectionString.
    batchMaxWaitTime String
    The maximum wait time per batch in hh:mm:ss e.g. 00:02:00 for two minutes.
    batchMinRows Integer
    The minimum number of rows per batch (must be between 0 and 1000000).
    blobWriteMode String
    Determines whether blob blocks are either committed automatically or appended. Possible values are Append and Once. Defaults to Append.
    name String
    The name of the Stream Output. Changing this forces a new resource to be created.
    storageAccountKey String
    The Access Key which should be used to connect to this Storage Account.
    dateFormat string
    The date format. Wherever {date} appears in path_pattern, the value of this property is used as the date format instead.
    pathPattern string
    The blob path pattern. Not a regular expression. It represents a pattern against which blob names will be matched to determine whether or not they should be included as input or output to the job.
    resourceGroupName string
    The name of the Resource Group where the Stream Analytics Job exists. Changing this forces a new resource to be created.
    serialization OutputBlobSerialization
    A serialization block as defined below.
    storageAccountName string
    The name of the Storage Account.
    storageContainerName string
    The name of the Container within the Storage Account.
    streamAnalyticsJobName string
    The name of the Stream Analytics Job. Changing this forces a new resource to be created.
    timeFormat string
    The time format. Wherever {time} appears in path_pattern, the value of this property is used as the time format instead.
    authenticationMode string
    The authentication mode for the Stream Output. Possible values are Msi and ConnectionString. Defaults to ConnectionString.
    batchMaxWaitTime string
    The maximum wait time per batch in hh:mm:ss e.g. 00:02:00 for two minutes.
    batchMinRows number
    The minimum number of rows per batch (must be between 0 and 1000000).
    blobWriteMode string
    Determines whether blob blocks are either committed automatically or appended. Possible values are Append and Once. Defaults to Append.
    name string
    The name of the Stream Output. Changing this forces a new resource to be created.
    storageAccountKey string
    The Access Key which should be used to connect to this Storage Account.
    date_format str
    The date format. Wherever {date} appears in path_pattern, the value of this property is used as the date format instead.
    path_pattern str
    The blob path pattern. Not a regular expression. It represents a pattern against which blob names will be matched to determine whether or not they should be included as input or output to the job.
    resource_group_name str
    The name of the Resource Group where the Stream Analytics Job exists. Changing this forces a new resource to be created.
    serialization OutputBlobSerializationArgs
    A serialization block as defined below.
    storage_account_name str
    The name of the Storage Account.
    storage_container_name str
    The name of the Container within the Storage Account.
    stream_analytics_job_name str
    The name of the Stream Analytics Job. Changing this forces a new resource to be created.
    time_format str
    The time format. Wherever {time} appears in path_pattern, the value of this property is used as the time format instead.
    authentication_mode str
    The authentication mode for the Stream Output. Possible values are Msi and ConnectionString. Defaults to ConnectionString.
    batch_max_wait_time str
    The maximum wait time per batch in hh:mm:ss e.g. 00:02:00 for two minutes.
    batch_min_rows int
    The minimum number of rows per batch (must be between 0 and 1000000).
    blob_write_mode str
    Determines whether blob blocks are either committed automatically or appended. Possible values are Append and Once. Defaults to Append.
    name str
    The name of the Stream Output. Changing this forces a new resource to be created.
    storage_account_key str
    The Access Key which should be used to connect to this Storage Account.
    dateFormat String
    The date format. Wherever {date} appears in path_pattern, the value of this property is used as the date format instead.
    pathPattern String
    The blob path pattern. Not a regular expression. It represents a pattern against which blob names will be matched to determine whether or not they should be included as input or output to the job.
    resourceGroupName String
    The name of the Resource Group where the Stream Analytics Job exists. Changing this forces a new resource to be created.
    serialization Property Map
    A serialization block as defined below.
    storageAccountName String
    The name of the Storage Account.
    storageContainerName String
    The name of the Container within the Storage Account.
    streamAnalyticsJobName String
    The name of the Stream Analytics Job. Changing this forces a new resource to be created.
    timeFormat String
    The time format. Wherever {time} appears in path_pattern, the value of this property is used as the time format instead.
    authenticationMode String
    The authentication mode for the Stream Output. Possible values are Msi and ConnectionString. Defaults to ConnectionString.
    batchMaxWaitTime String
    The maximum wait time per batch in hh:mm:ss e.g. 00:02:00 for two minutes.
    batchMinRows Number
    The minimum number of rows per batch (must be between 0 and 1000000).
    blobWriteMode String
    Determines whether blob blocks are either committed automatically or appended. Possible values are Append and Once. Defaults to Append.
    name String
    The name of the Stream Output. Changing this forces a new resource to be created.
    storageAccountKey String
    The Access Key which should be used to connect to this Storage Account.

    Outputs

    All input properties are implicitly available as output properties. Additionally, the OutputBlob resource produces the following output properties:

    Id string
    The provider-assigned unique ID for this managed resource.
    Id string
    The provider-assigned unique ID for this managed resource.
    id String
    The provider-assigned unique ID for this managed resource.
    id string
    The provider-assigned unique ID for this managed resource.
    id str
    The provider-assigned unique ID for this managed resource.
    id String
    The provider-assigned unique ID for this managed resource.
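
    Because every input is also available as an output, values such as the provider-assigned ID can be exported directly from the program. A minimal TypeScript sketch, reusing the exampleOutputBlob variable from the example usage above (the export names are illustrative):

    // The generated ID and any input property can be read back as outputs.
    export const blobOutputId = exampleOutputBlob.id;
    export const blobOutputContainer = exampleOutputBlob.storageContainerName;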

    Look up Existing OutputBlob Resource

    Get an existing OutputBlob resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

    public static get(name: string, id: Input<ID>, state?: OutputBlobState, opts?: CustomResourceOptions): OutputBlob
    @staticmethod
    def get(resource_name: str,
            id: str,
            opts: Optional[ResourceOptions] = None,
            authentication_mode: Optional[str] = None,
            batch_max_wait_time: Optional[str] = None,
            batch_min_rows: Optional[int] = None,
            blob_write_mode: Optional[str] = None,
            date_format: Optional[str] = None,
            name: Optional[str] = None,
            path_pattern: Optional[str] = None,
            resource_group_name: Optional[str] = None,
            serialization: Optional[OutputBlobSerializationArgs] = None,
            storage_account_key: Optional[str] = None,
            storage_account_name: Optional[str] = None,
            storage_container_name: Optional[str] = None,
            stream_analytics_job_name: Optional[str] = None,
            time_format: Optional[str] = None) -> OutputBlob
    func GetOutputBlob(ctx *Context, name string, id IDInput, state *OutputBlobState, opts ...ResourceOption) (*OutputBlob, error)
    public static OutputBlob Get(string name, Input<string> id, OutputBlobState? state, CustomResourceOptions? opts = null)
    public static OutputBlob get(String name, Output<String> id, OutputBlobState state, CustomResourceOptions options)
    Resource lookup is not supported in YAML
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    resource_name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    The following state arguments are supported:
    AuthenticationMode string
    The authentication mode for the Stream Output. Possible values are Msi and ConnectionString. Defaults to ConnectionString.
    BatchMaxWaitTime string
    The maximum wait time per batch in hh:mm:ss e.g. 00:02:00 for two minutes.
    BatchMinRows int
    The minimum number of rows per batch (must be between 0 and 1000000).
    BlobWriteMode string
    Determines whether blob blocks are either committed automatically or appended. Possible values are Append and Once. Defaults to Append.
    DateFormat string
    The date format. Wherever {date} appears in path_pattern, the value of this property is used as the date format instead.
    Name string
    The name of the Stream Output. Changing this forces a new resource to be created.
    PathPattern string
    The blob path pattern. Not a regular expression. It represents a pattern against which blob names will be matched to determine whether or not they should be included as input or output to the job.
    ResourceGroupName string
    The name of the Resource Group where the Stream Analytics Job exists. Changing this forces a new resource to be created.
    Serialization OutputBlobSerialization
    A serialization block as defined below.
    StorageAccountKey string
    The Access Key which should be used to connect to this Storage Account.
    StorageAccountName string
    The name of the Storage Account.
    StorageContainerName string
    The name of the Container within the Storage Account.
    StreamAnalyticsJobName string
    The name of the Stream Analytics Job. Changing this forces a new resource to be created.
    TimeFormat string
    The time format. Wherever {time} appears in path_pattern, the value of this property is used as the time format instead.
    AuthenticationMode string
    The authentication mode for the Stream Output. Possible values are Msi and ConnectionString. Defaults to ConnectionString.
    BatchMaxWaitTime string
    The maximum wait time per batch in hh:mm:ss e.g. 00:02:00 for two minutes.
    BatchMinRows int
    The minimum number of rows per batch (must be between 0 and 1000000).
    BlobWriteMode string
    Determines whether blob blocks are either committed automatically or appended. Possible values are Append and Once. Defaults to Append.
    DateFormat string
    The date format. Wherever {date} appears in path_pattern, the value of this property is used as the date format instead.
    Name string
    The name of the Stream Output. Changing this forces a new resource to be created.
    PathPattern string
    The blob path pattern. Not a regular expression. It represents a pattern against which blob names will be matched to determine whether or not they should be included as input or output to the job.
    ResourceGroupName string
    The name of the Resource Group where the Stream Analytics Job exists. Changing this forces a new resource to be created.
    Serialization OutputBlobSerializationArgs
    A serialization block as defined below.
    StorageAccountKey string
    The Access Key which should be used to connect to this Storage Account.
    StorageAccountName string
    The name of the Storage Account.
    StorageContainerName string
    The name of the Container within the Storage Account.
    StreamAnalyticsJobName string
    The name of the Stream Analytics Job. Changing this forces a new resource to be created.
    TimeFormat string
    The time format. Wherever {time} appears in path_pattern, the value of this property is used as the time format instead.
    authenticationMode String
    The authentication mode for the Stream Output. Possible values are Msi and ConnectionString. Defaults to ConnectionString.
    batchMaxWaitTime String
    The maximum wait time per batch in hh:mm:ss e.g. 00:02:00 for two minutes.
    batchMinRows Integer
    The minimum number of rows per batch (must be between 0 and 1000000).
    blobWriteMode String
    Determines whether blob blocks are either committed automatically or appended. Possible values are Append and Once. Defaults to Append.
    dateFormat String
    The date format. Wherever {date} appears in path_pattern, the value of this property is used as the date format instead.
    name String
    The name of the Stream Output. Changing this forces a new resource to be created.
    pathPattern String
    The blob path pattern. Not a regular expression. It represents a pattern against which blob names will be matched to determine whether or not they should be included as input or output to the job.
    resourceGroupName String
    The name of the Resource Group where the Stream Analytics Job exists. Changing this forces a new resource to be created.
    serialization OutputBlobSerialization
    A serialization block as defined below.
    storageAccountKey String
    The Access Key which should be used to connect to this Storage Account.
    storageAccountName String
    The name of the Storage Account.
    storageContainerName String
    The name of the Container within the Storage Account.
    streamAnalyticsJobName String
    The name of the Stream Analytics Job. Changing this forces a new resource to be created.
    timeFormat String
    The time format. Wherever {time} appears in path_pattern, the value of this property is used as the time format instead.
    authenticationMode string
    The authentication mode for the Stream Output. Possible values are Msi and ConnectionString. Defaults to ConnectionString.
    batchMaxWaitTime string
    The maximum wait time per batch in hh:mm:ss e.g. 00:02:00 for two minutes.
    batchMinRows number
    The minimum number of rows per batch (must be between 0 and 1000000).
    blobWriteMode string
    Determines whether blob blocks are either committed automatically or appended. Possible values are Append and Once. Defaults to Append.
    dateFormat string
    The date format. Wherever {date} appears in path_pattern, the value of this property is used as the date format instead.
    name string
    The name of the Stream Output. Changing this forces a new resource to be created.
    pathPattern string
    The blob path pattern. Not a regular expression. It represents a pattern against which blob names will be matched to determine whether or not they should be included as input or output to the job.
    resourceGroupName string
    The name of the Resource Group where the Stream Analytics Job exists. Changing this forces a new resource to be created.
    serialization OutputBlobSerialization
    A serialization block as defined below.
    storageAccountKey string
    The Access Key which should be used to connect to this Storage Account.
    storageAccountName string
    The name of the Storage Account.
    storageContainerName string
    The name of the Container within the Storage Account.
    streamAnalyticsJobName string
    The name of the Stream Analytics Job. Changing this forces a new resource to be created.
    timeFormat string
    The time format. Wherever {time} appears in path_pattern, the value of this property is used as the time format instead.
    authentication_mode str
    The authentication mode for the Stream Output. Possible values are Msi and ConnectionString. Defaults to ConnectionString.
    batch_max_wait_time str
    The maximum wait time per batch in hh:mm:ss e.g. 00:02:00 for two minutes.
    batch_min_rows int
    The minimum number of rows per batch (must be between 0 and 1000000).
    blob_write_mode str
    Determines whether blob blocks are either committed automatically or appended. Possible values are Append and Once. Defaults to Append.
    date_format str
    The date format. Wherever {date} appears in path_pattern, the value of this property is used as the date format instead.
    name str
    The name of the Stream Output. Changing this forces a new resource to be created.
    path_pattern str
    The blob path pattern. Not a regular expression. It represents a pattern against which blob names will be matched to determine whether or not they should be included as input or output to the job.
    resource_group_name str
    The name of the Resource Group where the Stream Analytics Job exists. Changing this forces a new resource to be created.
    serialization OutputBlobSerializationArgs
    A serialization block as defined below.
    storage_account_key str
    The Access Key which should be used to connect to this Storage Account.
    storage_account_name str
    The name of the Storage Account.
    storage_container_name str
    The name of the Container within the Storage Account.
    stream_analytics_job_name str
    The name of the Stream Analytics Job. Changing this forces a new resource to be created.
    time_format str
    The time format. Wherever {time} appears in path_pattern, the value of this property is used as the time format instead.
    authenticationMode String
    The authentication mode for the Stream Output. Possible values are Msi and ConnectionString. Defaults to ConnectionString.
    batchMaxWaitTime String
    The maximum wait time per batch in hh:mm:ss e.g. 00:02:00 for two minutes.
    batchMinRows Number
    The minimum number of rows per batch (must be between 0 and 1000000).
    blobWriteMode String
    Determines whether blob blocks are either committed automatically or appended. Possible values are Append and Once. Defaults to Append.
    dateFormat String
    The date format. Wherever {date} appears in path_pattern, the value of this property is used as the date format instead.
    name String
    The name of the Stream Output. Changing this forces a new resource to be created.
    pathPattern String
    The blob path pattern. Not a regular expression. It represents a pattern against which blob names will be matched to determine whether or not they should be included as input or output to the job.
    resourceGroupName String
    The name of the Resource Group where the Stream Analytics Job exists. Changing this forces a new resource to be created.
    serialization Property Map
    A serialization block as defined below.
    storageAccountKey String
    The Access Key which should be used to connect to this Storage Account.
    storageAccountName String
    The name of the Storage Account.
    storageContainerName String
    The name of the Container within the Storage Account.
    streamAnalyticsJobName String
    The name of the Stream Analytics Job. Changing this forces a new resource to be created.
    timeFormat String
    The time format. Wherever {time} appears in path_pattern, the value of this property is used as the time format instead.
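
    As a sketch of the lookup described above, an existing output can be read into a program by its Azure resource ID rather than recreated. A minimal TypeScript example; the resource ID below is a placeholder and the export name is illustrative:

    import * as azure from "@pulumi/azure";

    // Read an existing Stream Analytics blob output by its resource ID.
    // The returned object exposes the same properties as a newly created resource.
    const existing = azure.streamanalytics.OutputBlob.get(
        "existing-output",
        "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.StreamAnalytics/streamingJobs/job1/outputs/output1",
    );

    export const existingPathPattern = existing.pathPattern;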

    Supporting Types

    OutputBlobSerialization, OutputBlobSerializationArgs

    Type string

    The serialization format used for outgoing data streams. Possible values are Avro, Csv, Json and Parquet.

    NOTE: batch_max_wait_time and batch_min_rows are required when type is set to Parquet.

    Encoding string

    The encoding of the incoming data in the case of input and the encoding of outgoing data in the case of output. It currently can only be set to UTF8.

    NOTE: This is required when type is set to Csv or Json.

    FieldDelimiter string

    The delimiter that will be used to separate comma-separated value (CSV) records. Possible values are a space, a comma (,), a tab, a pipe (|) and a semicolon (;).

    NOTE: This is required when type is set to Csv.

    Format string

    Specifies the format in which the JSON output will be written. Possible values are Array and LineSeparated.

    NOTE: This is required and can only be specified when type is set to Json.

    Type string

    The serialization format used for outgoing data streams. Possible values are Avro, Csv, Json and Parquet.

    NOTE: batch_max_wait_time and batch_min_rows are required when type is set to Parquet.

    Encoding string

    The encoding of the incoming data in the case of input and the encoding of outgoing data in the case of output. It currently can only be set to UTF8.

    NOTE: This is required when type is set to Csv or Json.

    FieldDelimiter string

    The delimiter that will be used to separate comma-separated value (CSV) records. Possible values are a space, a comma (,), a tab, a pipe (|) and a semicolon (;).

    NOTE: This is required when type is set to Csv.

    Format string

    Specifies the format in which the JSON output will be written. Possible values are Array and LineSeparated.

    NOTE: This is required and can only be specified when type is set to Json.

    type String

    The serialization format used for outgoing data streams. Possible values are Avro, Csv, Json and Parquet.

    NOTE: batch_max_wait_time and batch_min_rows are required when type is set to Parquet.

    encoding String

    The encoding of the incoming data in the case of input and the encoding of outgoing data in the case of output. It currently can only be set to UTF8.

    NOTE: This is required when type is set to Csv or Json.

    fieldDelimiter String

    The delimiter that will be used to separate comma-separated value (CSV) records. Possible values are a space, a comma (,), a tab, a pipe (|) and a semicolon (;).

    NOTE: This is required when type is set to Csv.

    format String

    Specifies the format in which the JSON output will be written. Possible values are Array and LineSeparated.

    NOTE: This is required and can only be specified when type is set to Json.

    type string

    The serialization format used for outgoing data streams. Possible values are Avro, Csv, Json and Parquet.

    NOTE: batch_max_wait_time and batch_min_rows are required when type is set to Parquet.

    encoding string

    The encoding of the incoming data in the case of input and the encoding of outgoing data in the case of output. It currently can only be set to UTF8.

    NOTE: This is required when type is set to Csv or Json.

    fieldDelimiter string

    The delimiter that will be used to separate comma-separated value (CSV) records. Possible values are a space, a comma (,), a tab, a pipe (|) and a semicolon (;).

    NOTE: This is required when type is set to Csv.

    format string

    Specifies the format in which the JSON output will be written. Possible values are Array and LineSeparated.

    NOTE: This is required and can only be specified when type is set to Json.

    type str

    The serialization format used for outgoing data streams. Possible values are Avro, Csv, Json and Parquet.

    NOTE: batch_max_wait_time and batch_min_rows are required when type is set to Parquet.

    encoding str

    The encoding of the incoming data in the case of input and the encoding of outgoing data in the case of output. It currently can only be set to UTF8.

    NOTE: This is required when type is set to Csv or Json.

    field_delimiter str

    The delimiter that will be used to separate comma-separated value (CSV) records. Possible values are a space, a comma (,), a tab, a pipe (|) and a semicolon (;).

    NOTE: This is required when type is set to Csv.

    format str

    Specifies the format in which the JSON output will be written. Possible values are Array and LineSeparated.

    NOTE: This is required and can only be specified when type is set to Json.

    type String

    The serialization format used for outgoing data streams. Possible values are Avro, Csv, Json and Parquet.

    NOTE: batch_max_wait_time and batch_min_rows are required when type is set to Parquet.

    encoding String

    The encoding of the incoming data in the case of input and the encoding of outgoing data in the case of output. It currently can only be set to UTF8.

    NOTE: This is required when type is set to Csv or Json.

    fieldDelimiter String

    The delimiter that will be used to separate comma-separated value (CSV) records. Possible values are a space, a comma (,), a tab, a pipe (|) and a semicolon (;).

    NOTE: This is required when type is set to Csv.

    format String

    Specifies the format in which the JSON output will be written. Possible values are Array and LineSeparated.

    NOTE: This is required and can only be specified when type is set to Json.
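
    For instance, the notes above imply that a Parquet output omits the CSV-specific serialization fields and must set the batch properties on the resource itself. A minimal TypeScript sketch, assuming an existing Stream Analytics job, storage account, and container; every literal value below (names, key, path) is a placeholder:

    import * as azure from "@pulumi/azure";

    const parquetOutput = new azure.streamanalytics.OutputBlob("parquet-example", {
        name: "output-to-blob-parquet",
        streamAnalyticsJobName: "example-job",
        resourceGroupName: "rg-example",
        storageAccountName: "examplesa",
        storageAccountKey: "<storage-account-access-key>",
        storageContainerName: "example",
        // {date} and {time} are substituted using dateFormat and timeFormat.
        pathPattern: "output/{date}/{time}",
        dateFormat: "yyyy-MM-dd",
        timeFormat: "HH",
        serialization: {
            type: "Parquet",
        },
        // Required when the serialization type is Parquet (see the note above).
        batchMinRows: 1000,
        batchMaxWaitTime: "00:02:00",
    });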

    Import

    Stream Analytics Outputs to Blob Storage can be imported using the resource id, e.g.

    $ pulumi import azure:streamanalytics/outputBlob:OutputBlob example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.StreamAnalytics/streamingJobs/job1/outputs/output1
    

    To learn more about importing existing cloud resources, see Importing resources.
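
    After the import, the program also needs a matching OutputBlob declaration so that later updates manage the adopted resource (recent versions of the CLI can generate this code as part of the import). A minimal TypeScript sketch corresponding to the placeholder ID above; the property values are illustrative and must mirror the existing output's actual configuration:

    import * as azure from "@pulumi/azure";

    // Declaration corresponding to the imported resource; values shown here are
    // placeholders and must match what already exists in Azure.
    const imported = new azure.streamanalytics.OutputBlob("example", {
        name: "output1",
        streamAnalyticsJobName: "job1",
        resourceGroupName: "group1",
        storageAccountName: "examplesa",
        storageContainerName: "example",
        pathPattern: "some-pattern",
        dateFormat: "yyyy-MM-dd",
        timeFormat: "HH",
        serialization: {
            type: "Csv",
            encoding: "UTF8",
            fieldDelimiter: ",",
        },
    });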

    Package Details

    Repository
    Azure Classic pulumi/pulumi-azure
    License
    Apache-2.0
    Notes
    This Pulumi package is based on the azurerm Terraform Provider.