Configure Azure Stream Analytics Outputs

The azure-native:streamanalytics:Output resource, part of the Pulumi Azure Native provider, defines where Stream Analytics jobs write processed events: the destination type, connection details, and serialization format. This guide focuses on five capabilities: Blob Storage with time-based partitioning, Event Hub for real-time consumers, SQL Database for relational storage, Service Bus with Avro serialization, and Power BI for live dashboards.

Outputs belong to Stream Analytics jobs and reference destination resources (storage accounts, Event Hubs, databases) that must exist separately. The examples are intentionally small. Combine them with your own Stream Analytics jobs and destination infrastructure.
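
The snippets below hard-code job and resource-group names for brevity. If the job lives in the same Pulumi program, you can create it alongside the outputs and pass its generated name through instead; a minimal TypeScript sketch, with illustrative names:

import * as azure_native from "@pulumi/azure-native";

// A minimal Stream Analytics job for outputs to attach to (names illustrative).
const job = new azure_native.streamanalytics.StreamingJob("job", {
    resourceGroupName: "sjrg5023",
    sku: {
        name: "Standard",
    },
});

// Passing job.name as jobName on an Output lets Pulumi track the dependency.
export const jobName = job.name;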

Write CSV files to Blob Storage with time-based partitioning

Many analytics pipelines write processed events to Blob Storage as CSV files, organizing output by date and time for downstream batch processing or archival.

import * as pulumi from "@pulumi/pulumi";
import * as azure_native from "@pulumi/azure-native";

const output = new azure_native.streamanalytics.Output("output", {
    datasource: {
        container: "state",
        dateFormat: "yyyy/MM/dd",
        pathPattern: "{date}/{time}",
        storageAccounts: [{
            accountKey: "accountKey==",
            accountName: "someAccountName",
        }],
        timeFormat: "HH",
        type: "Microsoft.Storage/Blob",
    },
    jobName: "sj900",
    outputName: "output1623",
    resourceGroupName: "sjrg5023",
    serialization: {
        encoding: azure_native.streamanalytics.Encoding.UTF8,
        fieldDelimiter: ",",
        type: "Csv",
    },
});
import pulumi
import pulumi_azure_native as azure_native

output = azure_native.streamanalytics.Output("output",
    datasource={
        "container": "state",
        "date_format": "yyyy/MM/dd",
        "path_pattern": "{date}/{time}",
        "storage_accounts": [{
            "account_key": "accountKey==",
            "account_name": "someAccountName",
        }],
        "time_format": "HH",
        "type": "Microsoft.Storage/Blob",
    },
    job_name="sj900",
    output_name="output1623",
    resource_group_name="sjrg5023",
    serialization={
        "encoding": azure_native.streamanalytics.Encoding.UTF8,
        "field_delimiter": ",",
        "type": "Csv",
    })
package main

import (
	streamanalytics "github.com/pulumi/pulumi-azure-native-sdk/streamanalytics/v3"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := streamanalytics.NewOutput(ctx, "output", &streamanalytics.OutputArgs{
			Datasource: &streamanalytics.BlobOutputDataSourceArgs{
				Container:   pulumi.String("state"),
				DateFormat:  pulumi.String("yyyy/MM/dd"),
				PathPattern: pulumi.String("{date}/{time}"),
				StorageAccounts: streamanalytics.StorageAccountArray{
					&streamanalytics.StorageAccountArgs{
						AccountKey:  pulumi.String("accountKey=="),
						AccountName: pulumi.String("someAccountName"),
					},
				},
				TimeFormat: pulumi.String("HH"),
				Type:       pulumi.String("Microsoft.Storage/Blob"),
			},
			JobName:           pulumi.String("sj900"),
			OutputName:        pulumi.String("output1623"),
			ResourceGroupName: pulumi.String("sjrg5023"),
			Serialization: &streamanalytics.CsvSerializationArgs{
				Encoding:       pulumi.String(streamanalytics.EncodingUTF8),
				FieldDelimiter: pulumi.String(","),
				Type:           pulumi.String("Csv"),
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using AzureNative = Pulumi.AzureNative;

return await Deployment.RunAsync(() => 
{
    var output = new AzureNative.StreamAnalytics.Output("output", new()
    {
        Datasource = new AzureNative.StreamAnalytics.Inputs.BlobOutputDataSourceArgs
        {
            Container = "state",
            DateFormat = "yyyy/MM/dd",
            PathPattern = "{date}/{time}",
            StorageAccounts = new[]
            {
                new AzureNative.StreamAnalytics.Inputs.StorageAccountArgs
                {
                    AccountKey = "accountKey==",
                    AccountName = "someAccountName",
                },
            },
            TimeFormat = "HH",
            Type = "Microsoft.Storage/Blob",
        },
        JobName = "sj900",
        OutputName = "output1623",
        ResourceGroupName = "sjrg5023",
        Serialization = new AzureNative.StreamAnalytics.Inputs.CsvSerializationArgs
        {
            Encoding = AzureNative.StreamAnalytics.Encoding.UTF8,
            FieldDelimiter = ",",
            Type = "Csv",
        },
    });

});
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.azurenative.streamanalytics.Output;
import com.pulumi.azurenative.streamanalytics.OutputArgs;
import com.pulumi.azurenative.streamanalytics.inputs.BlobOutputDataSourceArgs;
import com.pulumi.azurenative.streamanalytics.inputs.CsvSerializationArgs;
import com.pulumi.azurenative.streamanalytics.inputs.StorageAccountArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var output = new Output("output", OutputArgs.builder()
            .datasource(BlobOutputDataSourceArgs.builder()
                .container("state")
                .dateFormat("yyyy/MM/dd")
                .pathPattern("{date}/{time}")
                .storageAccounts(StorageAccountArgs.builder()
                    .accountKey("accountKey==")
                    .accountName("someAccountName")
                    .build())
                .timeFormat("HH")
                .type("Microsoft.Storage/Blob")
                .build())
            .jobName("sj900")
            .outputName("output1623")
            .resourceGroupName("sjrg5023")
            .serialization(CsvSerializationArgs.builder()
                .encoding("UTF8")
                .fieldDelimiter(",")
                .type("Csv")
                .build())
            .build());

    }
}
resources:
  output:
    type: azure-native:streamanalytics:Output
    properties:
      datasource:
        container: state
        dateFormat: yyyy/MM/dd
        pathPattern: '{date}/{time}'
        storageAccounts:
          - accountKey: accountKey==
            accountName: someAccountName
        timeFormat: HH
        type: Microsoft.Storage/Blob
      jobName: sj900
      outputName: output1623
      resourceGroupName: sjrg5023
      serialization:
        encoding: UTF8
        fieldDelimiter: ','
        type: Csv

The datasource property configures the Blob Storage destination. The pathPattern uses {date}/{time} placeholders that Stream Analytics replaces with actual values based on dateFormat and timeFormat. The serialization property specifies CSV format with UTF-8 encoding and comma delimiters. Files land in the state container under paths like 2024/01/15/14/.
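
Rather than embedding a literal account key, you can look it up from an existing storage account at deployment time and keep it encrypted in state. A sketch, assuming the account someAccountName lives in resource group sjrg5023:

import * as pulumi from "@pulumi/pulumi";
import * as azure_native from "@pulumi/azure-native";

// Fetch the keys of an existing storage account (names assumed above).
const keys = azure_native.storage.listStorageAccountKeysOutput({
    accountName: "someAccountName",
    resourceGroupName: "sjrg5023",
});

// Mark the primary key as a secret so it is encrypted in Pulumi state.
const accountKey = pulumi.secret(keys.apply(k => k.keys[0].value));

You can then pass accountKey in place of the literal accountKey string in storageAccounts.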

Stream JSON events to Event Hub for real-time consumers

Event Hub outputs enable real-time downstream processing by publishing transformed events to topics that other services can consume immediately.

import * as pulumi from "@pulumi/pulumi";
import * as azure_native from "@pulumi/azure-native";

const output = new azure_native.streamanalytics.Output("output", {
    datasource: {
        eventHubName: "sdkeventhub",
        partitionKey: "partitionKey",
        serviceBusNamespace: "sdktest",
        sharedAccessPolicyKey: "sharedAccessPolicyKey=",
        sharedAccessPolicyName: "RootManageSharedAccessKey",
        type: "Microsoft.ServiceBus/EventHub",
    },
    jobName: "sj3310",
    outputName: "output5195",
    resourceGroupName: "sjrg6912",
    serialization: {
        encoding: azure_native.streamanalytics.Encoding.UTF8,
        format: azure_native.streamanalytics.JsonOutputSerializationFormat.Array,
        type: "Json",
    },
});
import pulumi
import pulumi_azure_native as azure_native

output = azure_native.streamanalytics.Output("output",
    datasource={
        "event_hub_name": "sdkeventhub",
        "partition_key": "partitionKey",
        "service_bus_namespace": "sdktest",
        "shared_access_policy_key": "sharedAccessPolicyKey=",
        "shared_access_policy_name": "RootManageSharedAccessKey",
        "type": "Microsoft.ServiceBus/EventHub",
    },
    job_name="sj3310",
    output_name="output5195",
    resource_group_name="sjrg6912",
    serialization={
        "encoding": azure_native.streamanalytics.Encoding.UTF8,
        "format": azure_native.streamanalytics.JsonOutputSerializationFormat.ARRAY,
        "type": "Json",
    })
package main

import (
	streamanalytics "github.com/pulumi/pulumi-azure-native-sdk/streamanalytics/v3"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := streamanalytics.NewOutput(ctx, "output", &streamanalytics.OutputArgs{
			Datasource: &streamanalytics.EventHubOutputDataSourceArgs{
				EventHubName:           pulumi.String("sdkeventhub"),
				PartitionKey:           pulumi.String("partitionKey"),
				ServiceBusNamespace:    pulumi.String("sdktest"),
				SharedAccessPolicyKey:  pulumi.String("sharedAccessPolicyKey="),
				SharedAccessPolicyName: pulumi.String("RootManageSharedAccessKey"),
				Type:                   pulumi.String("Microsoft.ServiceBus/EventHub"),
			},
			JobName:           pulumi.String("sj3310"),
			OutputName:        pulumi.String("output5195"),
			ResourceGroupName: pulumi.String("sjrg6912"),
			Serialization: &streamanalytics.JsonSerializationArgs{
				Encoding: pulumi.String(streamanalytics.EncodingUTF8),
				Format:   pulumi.String(streamanalytics.JsonOutputSerializationFormatArray),
				Type:     pulumi.String("Json"),
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using AzureNative = Pulumi.AzureNative;

return await Deployment.RunAsync(() => 
{
    var output = new AzureNative.StreamAnalytics.Output("output", new()
    {
        Datasource = new AzureNative.StreamAnalytics.Inputs.EventHubOutputDataSourceArgs
        {
            EventHubName = "sdkeventhub",
            PartitionKey = "partitionKey",
            ServiceBusNamespace = "sdktest",
            SharedAccessPolicyKey = "sharedAccessPolicyKey=",
            SharedAccessPolicyName = "RootManageSharedAccessKey",
            Type = "Microsoft.ServiceBus/EventHub",
        },
        JobName = "sj3310",
        OutputName = "output5195",
        ResourceGroupName = "sjrg6912",
        Serialization = new AzureNative.StreamAnalytics.Inputs.JsonSerializationArgs
        {
            Encoding = AzureNative.StreamAnalytics.Encoding.UTF8,
            Format = AzureNative.StreamAnalytics.JsonOutputSerializationFormat.Array,
            Type = "Json",
        },
    });

});
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.azurenative.streamanalytics.Output;
import com.pulumi.azurenative.streamanalytics.OutputArgs;
import com.pulumi.azurenative.streamanalytics.inputs.EventHubOutputDataSourceArgs;
import com.pulumi.azurenative.streamanalytics.inputs.JsonSerializationArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var output = new Output("output", OutputArgs.builder()
            .datasource(EventHubOutputDataSourceArgs.builder()
                .eventHubName("sdkeventhub")
                .partitionKey("partitionKey")
                .serviceBusNamespace("sdktest")
                .sharedAccessPolicyKey("sharedAccessPolicyKey=")
                .sharedAccessPolicyName("RootManageSharedAccessKey")
                .type("Microsoft.ServiceBus/EventHub")
                .build())
            .jobName("sj3310")
            .outputName("output5195")
            .resourceGroupName("sjrg6912")
            .serialization(JsonSerializationArgs.builder()
                .encoding("UTF8")
                .format("Array")
                .type("Json")
                .build())
            .build());

    }
}
resources:
  output:
    type: azure-native:streamanalytics:Output
    properties:
      datasource:
        eventHubName: sdkeventhub
        partitionKey: partitionKey
        serviceBusNamespace: sdktest
        sharedAccessPolicyKey: sharedAccessPolicyKey=
        sharedAccessPolicyName: RootManageSharedAccessKey
        type: Microsoft.ServiceBus/EventHub
      jobName: sj3310
      outputName: output5195
      resourceGroupName: sjrg6912
      serialization:
        encoding: UTF8
        format: Array
        type: Json

The datasource property connects to an Event Hub within a Service Bus namespace. The partitionKey property determines how events are distributed across Event Hub partitions. The serialization property configures JSON output with Array format, which writes events as a JSON array. This enables real-time consumers to process events as they arrive.
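
The shared access policy key is a credential; rather than inlining it, you can read it from stack configuration as a secret. A sketch, assuming the value was stored with pulumi config set --secret ehPolicyKey <key>:

import * as pulumi from "@pulumi/pulumi";

const cfg = new pulumi.Config();
// Resolves to an encrypted output value; safe to pass as sharedAccessPolicyKey.
const policyKey = cfg.requireSecret("ehPolicyKey");

If downstream consumers expect newline-delimited JSON instead of a single array, set the serialization format to LineSeparated rather than Array.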

Load streaming data into Azure SQL Database tables

SQL database outputs enable direct loading of streaming results into relational tables for querying and reporting.

import * as pulumi from "@pulumi/pulumi";
import * as azure_native from "@pulumi/azure-native";

const output = new azure_native.streamanalytics.Output("output", {
    datasource: {
        database: "someDatabase",
        password: "somePassword",
        server: "someServer",
        table: "someTable",
        type: "Microsoft.Sql/Server/Database",
        user: "<user>",
    },
    jobName: "sj6458",
    outputName: "output1755",
    resourceGroupName: "sjrg2157",
});
import pulumi
import pulumi_azure_native as azure_native

output = azure_native.streamanalytics.Output("output",
    datasource={
        "database": "someDatabase",
        "password": "somePassword",
        "server": "someServer",
        "table": "someTable",
        "type": "Microsoft.Sql/Server/Database",
        "user": "<user>",
    },
    job_name="sj6458",
    output_name="output1755",
    resource_group_name="sjrg2157")
package main

import (
	streamanalytics "github.com/pulumi/pulumi-azure-native-sdk/streamanalytics/v3"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := streamanalytics.NewOutput(ctx, "output", &streamanalytics.OutputArgs{
			Datasource: &streamanalytics.AzureSqlDatabaseOutputDataSourceArgs{
				Database: pulumi.String("someDatabase"),
				Password: pulumi.String("somePassword"),
				Server:   pulumi.String("someServer"),
				Table:    pulumi.String("someTable"),
				Type:     pulumi.String("Microsoft.Sql/Server/Database"),
				User:     pulumi.String("<user>"),
			},
			JobName:           pulumi.String("sj6458"),
			OutputName:        pulumi.String("output1755"),
			ResourceGroupName: pulumi.String("sjrg2157"),
		})
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using AzureNative = Pulumi.AzureNative;

return await Deployment.RunAsync(() => 
{
    var output = new AzureNative.StreamAnalytics.Output("output", new()
    {
        Datasource = new AzureNative.StreamAnalytics.Inputs.AzureSqlDatabaseOutputDataSourceArgs
        {
            Database = "someDatabase",
            Password = "somePassword",
            Server = "someServer",
            Table = "someTable",
            Type = "Microsoft.Sql/Server/Database",
            User = "<user>",
        },
        JobName = "sj6458",
        OutputName = "output1755",
        ResourceGroupName = "sjrg2157",
    });

});
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.azurenative.streamanalytics.Output;
import com.pulumi.azurenative.streamanalytics.OutputArgs;
import com.pulumi.azurenative.streamanalytics.inputs.AzureSqlDatabaseOutputDataSourceArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var output = new Output("output", OutputArgs.builder()
            .datasource(AzureSqlDatabaseOutputDataSourceArgs.builder()
                .database("someDatabase")
                .password("somePassword")
                .server("someServer")
                .table("someTable")
                .type("Microsoft.Sql/Server/Database")
                .user("<user>")
                .build())
            .jobName("sj6458")
            .outputName("output1755")
            .resourceGroupName("sjrg2157")
            .build());

    }
}
resources:
  output:
    type: azure-native:streamanalytics:Output
    properties:
      datasource:
        database: someDatabase
        password: somePassword
        server: someServer
        table: someTable
        type: Microsoft.Sql/Server/Database
        user: <user>
      jobName: sj6458
      outputName: output1755
      resourceGroupName: sjrg2157

The datasource property specifies the SQL server, database, and target table. Stream Analytics writes rows directly to the table as events arrive. The table schema must match the output schema from your query. This enables immediate querying of streaming results through standard SQL tools.
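
As with other credentialed outputs, the SQL password is better supplied from configuration than inlined. A sketch, assuming pulumi config set --secret sqlPassword <password> and the names from the example above:

import * as pulumi from "@pulumi/pulumi";
import * as azure_native from "@pulumi/azure-native";

const cfg = new pulumi.Config();
const sqlPassword = cfg.requireSecret("sqlPassword");

const sqlOutput = new azure_native.streamanalytics.Output("sql-output", {
    jobName: "sj6458",
    outputName: "output1755",
    resourceGroupName: "sjrg2157",
    datasource: {
        type: "Microsoft.Sql/Server/Database",
        server: "someServer",
        database: "someDatabase",
        table: "someTable",
        user: "<user>",
        password: sqlPassword, // encrypted in state, decrypted only on deploy
    },
});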

Send Avro-serialized messages to Service Bus Queue

Service Bus Queue outputs enable reliable message delivery with Avro serialization, which provides compact binary encoding and schema evolution support.

import * as pulumi from "@pulumi/pulumi";
import * as azure_native from "@pulumi/azure-native";

const output = new azure_native.streamanalytics.Output("output", {
    datasource: {
        propertyColumns: [
            "column1",
            "column2",
        ],
        queueName: "sdkqueue",
        serviceBusNamespace: "sdktest",
        sharedAccessPolicyKey: "sharedAccessPolicyKey=",
        sharedAccessPolicyName: "RootManageSharedAccessKey",
        systemPropertyColumns: {
            MessageId: "col3",
            PartitionKey: "col4",
        },
        type: "Microsoft.ServiceBus/Queue",
    },
    jobName: "sj5095",
    outputName: "output3456",
    resourceGroupName: "sjrg3410",
    serialization: {
        type: "Avro",
    },
});
import pulumi
import pulumi_azure_native as azure_native

output = azure_native.streamanalytics.Output("output",
    datasource={
        "property_columns": [
            "column1",
            "column2",
        ],
        "queue_name": "sdkqueue",
        "service_bus_namespace": "sdktest",
        "shared_access_policy_key": "sharedAccessPolicyKey=",
        "shared_access_policy_name": "RootManageSharedAccessKey",
        "system_property_columns": {
            "MessageId": "col3",
            "PartitionKey": "col4",
        },
        "type": "Microsoft.ServiceBus/Queue",
    },
    job_name="sj5095",
    output_name="output3456",
    resource_group_name="sjrg3410",
    serialization={
        "type": "Avro",
    })
package main

import (
	streamanalytics "github.com/pulumi/pulumi-azure-native-sdk/streamanalytics/v3"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := streamanalytics.NewOutput(ctx, "output", &streamanalytics.OutputArgs{
			Datasource: &streamanalytics.ServiceBusQueueOutputDataSourceArgs{
				PropertyColumns: pulumi.StringArray{
					pulumi.String("column1"),
					pulumi.String("column2"),
				},
				QueueName:              pulumi.String("sdkqueue"),
				ServiceBusNamespace:    pulumi.String("sdktest"),
				SharedAccessPolicyKey:  pulumi.String("sharedAccessPolicyKey="),
				SharedAccessPolicyName: pulumi.String("RootManageSharedAccessKey"),
				SystemPropertyColumns: pulumi.Any(map[string]interface{}{
					"MessageId":    "col3",
					"PartitionKey": "col4",
				}),
				Type: pulumi.String("Microsoft.ServiceBus/Queue"),
			},
			JobName:           pulumi.String("sj5095"),
			OutputName:        pulumi.String("output3456"),
			ResourceGroupName: pulumi.String("sjrg3410"),
			Serialization: &streamanalytics.AvroSerializationArgs{
				Type: pulumi.String("Avro"),
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using AzureNative = Pulumi.AzureNative;

return await Deployment.RunAsync(() => 
{
    var output = new AzureNative.StreamAnalytics.Output("output", new()
    {
        Datasource = new AzureNative.StreamAnalytics.Inputs.ServiceBusQueueOutputDataSourceArgs
        {
            PropertyColumns = new[]
            {
                "column1",
                "column2",
            },
            QueueName = "sdkqueue",
            ServiceBusNamespace = "sdktest",
            SharedAccessPolicyKey = "sharedAccessPolicyKey=",
            SharedAccessPolicyName = "RootManageSharedAccessKey",
            SystemPropertyColumns = new Dictionary<string, object?>
            {
                ["MessageId"] = "col3",
                ["PartitionKey"] = "col4",
            },
            Type = "Microsoft.ServiceBus/Queue",
        },
        JobName = "sj5095",
        OutputName = "output3456",
        ResourceGroupName = "sjrg3410",
        Serialization = new AzureNative.StreamAnalytics.Inputs.AvroSerializationArgs
        {
            Type = "Avro",
        },
    });

});
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.azurenative.streamanalytics.Output;
import com.pulumi.azurenative.streamanalytics.OutputArgs;
import com.pulumi.azurenative.streamanalytics.inputs.AvroSerializationArgs;
import com.pulumi.azurenative.streamanalytics.inputs.ServiceBusQueueOutputDataSourceArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var output = new Output("output", OutputArgs.builder()
            .datasource(ServiceBusQueueOutputDataSourceArgs.builder()
                .propertyColumns("column1", "column2")
                .queueName("sdkqueue")
                .serviceBusNamespace("sdktest")
                .sharedAccessPolicyKey("sharedAccessPolicyKey=")
                .sharedAccessPolicyName("RootManageSharedAccessKey")
                .systemPropertyColumns(Map.ofEntries(
                    Map.entry("MessageId", "col3"),
                    Map.entry("PartitionKey", "col4")
                ))
                .type("Microsoft.ServiceBus/Queue")
                .build())
            .jobName("sj5095")
            .outputName("output3456")
            .resourceGroupName("sjrg3410")
            .serialization(AvroSerializationArgs.builder()
                .type("Avro")
                .build())
            .build());

    }
}
resources:
  output:
    type: azure-native:streamanalytics:Output
    properties:
      datasource:
        propertyColumns:
          - column1
          - column2
        queueName: sdkqueue
        serviceBusNamespace: sdktest
        sharedAccessPolicyKey: sharedAccessPolicyKey=
        sharedAccessPolicyName: RootManageSharedAccessKey
        systemPropertyColumns:
          MessageId: col3
          PartitionKey: col4
        type: Microsoft.ServiceBus/Queue
      jobName: sj5095
      outputName: output3456
      resourceGroupName: sjrg3410
      serialization:
        type: Avro

The datasource property configures the Service Bus namespace and queue name. The propertyColumns array specifies which event fields to include as custom message properties. The systemPropertyColumns map assigns event fields to Service Bus system properties like MessageId and PartitionKey. The serialization property enables Avro format, which produces compact binary messages.
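
A Service Bus Topic output is configured almost identically: change the type discriminator and swap queueName for topicName. A sketch with an illustrative topic and output name:

import * as azure_native from "@pulumi/azure-native";

const topicOutput = new azure_native.streamanalytics.Output("topic-output", {
    jobName: "sj5095",
    outputName: "topicOutput",
    resourceGroupName: "sjrg3410",
    datasource: {
        type: "Microsoft.ServiceBus/Topic",
        topicName: "sdktopic", // illustrative topic name
        serviceBusNamespace: "sdktest",
        sharedAccessPolicyKey: "sharedAccessPolicyKey=",
        sharedAccessPolicyName: "RootManageSharedAccessKey",
    },
    serialization: {
        type: "Avro",
    },
});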

Visualize streaming data in Power BI dashboards

Power BI outputs push streaming data directly into Power BI datasets for real-time dashboard visualization without intermediate storage.

import * as pulumi from "@pulumi/pulumi";
import * as azure_native from "@pulumi/azure-native";

const output = new azure_native.streamanalytics.Output("output", {
    datasource: {
        dataset: "someDataset",
        groupId: "ac40305e-3e8d-43ac-8161-c33799f43e95",
        groupName: "MyPowerBIGroup",
        refreshToken: "someRefreshToken==",
        table: "someTable",
        tokenUserDisplayName: "Bob Smith",
        tokenUserPrincipalName: "bobsmith@contoso.com",
        type: "PowerBI",
    },
    jobName: "sj2331",
    outputName: "output3022",
    resourceGroupName: "sjrg7983",
});
import pulumi
import pulumi_azure_native as azure_native

output = azure_native.streamanalytics.Output("output",
    datasource={
        "dataset": "someDataset",
        "group_id": "ac40305e-3e8d-43ac-8161-c33799f43e95",
        "group_name": "MyPowerBIGroup",
        "refresh_token": "someRefreshToken==",
        "table": "someTable",
        "token_user_display_name": "Bob Smith",
        "token_user_principal_name": "bobsmith@contoso.com",
        "type": "PowerBI",
    },
    job_name="sj2331",
    output_name="output3022",
    resource_group_name="sjrg7983")
package main

import (
	streamanalytics "github.com/pulumi/pulumi-azure-native-sdk/streamanalytics/v3"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := streamanalytics.NewOutput(ctx, "output", &streamanalytics.OutputArgs{
			Datasource: &streamanalytics.PowerBIOutputDataSourceArgs{
				Dataset:                pulumi.String("someDataset"),
				GroupId:                pulumi.String("ac40305e-3e8d-43ac-8161-c33799f43e95"),
				GroupName:              pulumi.String("MyPowerBIGroup"),
				RefreshToken:           pulumi.String("someRefreshToken=="),
				Table:                  pulumi.String("someTable"),
				TokenUserDisplayName:   pulumi.String("Bob Smith"),
				TokenUserPrincipalName: pulumi.String("bobsmith@contoso.com"),
				Type:                   pulumi.String("PowerBI"),
			},
			JobName:           pulumi.String("sj2331"),
			OutputName:        pulumi.String("output3022"),
			ResourceGroupName: pulumi.String("sjrg7983"),
		})
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using AzureNative = Pulumi.AzureNative;

return await Deployment.RunAsync(() => 
{
    var output = new AzureNative.StreamAnalytics.Output("output", new()
    {
        Datasource = new AzureNative.StreamAnalytics.Inputs.PowerBIOutputDataSourceArgs
        {
            Dataset = "someDataset",
            GroupId = "ac40305e-3e8d-43ac-8161-c33799f43e95",
            GroupName = "MyPowerBIGroup",
            RefreshToken = "someRefreshToken==",
            Table = "someTable",
            TokenUserDisplayName = "Bob Smith",
            TokenUserPrincipalName = "bobsmith@contoso.com",
            Type = "PowerBI",
        },
        JobName = "sj2331",
        OutputName = "output3022",
        ResourceGroupName = "sjrg7983",
    });

});
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.azurenative.streamanalytics.Output;
import com.pulumi.azurenative.streamanalytics.OutputArgs;
import com.pulumi.azurenative.streamanalytics.inputs.PowerBIOutputDataSourceArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var output = new Output("output", OutputArgs.builder()
            .datasource(PowerBIOutputDataSourceArgs.builder()
                .dataset("someDataset")
                .groupId("ac40305e-3e8d-43ac-8161-c33799f43e95")
                .groupName("MyPowerBIGroup")
                .refreshToken("someRefreshToken==")
                .table("someTable")
                .tokenUserDisplayName("Bob Smith")
                .tokenUserPrincipalName("bobsmith@contoso.com")
                .type("PowerBI")
                .build())
            .jobName("sj2331")
            .outputName("output3022")
            .resourceGroupName("sjrg7983")
            .build());

    }
}
resources:
  output:
    type: azure-native:streamanalytics:Output
    properties:
      datasource:
        dataset: someDataset
        groupId: ac40305e-3e8d-43ac-8161-c33799f43e95
        groupName: MyPowerBIGroup
        refreshToken: someRefreshToken==
        table: someTable
        tokenUserDisplayName: Bob Smith
        tokenUserPrincipalName: bobsmith@contoso.com
        type: PowerBI
      jobName: sj2331
      outputName: output3022
      resourceGroupName: sjrg7983

The datasource property connects to a Power BI workspace (group) and specifies the target dataset and table. The refreshToken and token user properties authenticate with Power BI using OAuth. Stream Analytics creates or updates the dataset automatically. This enables live dashboards that update as events arrive, without building separate data pipelines.
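
The refreshToken shown here is a placeholder; the API guidance is to supply a dummy value on creation and then authenticate the output in the Azure portal, which stores a valid token. Either way, treat it as a secret so Pulumi encrypts it in state; a sketch:

import * as pulumi from "@pulumi/pulumi";

// Placeholder token, wrapped so it is stored encrypted in Pulumi state.
// Pass this in place of the literal refreshToken string above.
const refreshToken = pulumi.secret("someRefreshToken==");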

Beyond these examples

These snippets focus on specific output destination features: destination types (Blob, Event Hub, SQL, Service Bus, Power BI), serialization formats (CSV, JSON, Avro), and partitioning and batching controls. They’re intentionally minimal rather than full streaming pipelines.

The examples reference pre-existing infrastructure such as Stream Analytics jobs, destination resources (storage accounts, Event Hubs, SQL databases, Service Bus namespaces), and authentication credentials or managed identities. They focus on configuring the output rather than provisioning the surrounding infrastructure.

To keep things focused, common output patterns are omitted; the sketch after this list shows how a few of them fit together:

  • Managed identity authentication (authenticationMode)
  • Batch size and writer count tuning (maxBatchCount, maxWriterCount)
  • Time and size windowing (timeWindow, sizeWindow)
  • Blob write modes and path prefixes
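
A hedged sketch of a tuned blob output combining managed identity, newline-delimited JSON, and batching windows; property availability varies by API version, so check the version you target:

import * as azure_native from "@pulumi/azure-native";

const tunedOutput = new azure_native.streamanalytics.Output("tuned-output", {
    jobName: "sj900",
    outputName: "tunedOutput",
    resourceGroupName: "sjrg5023",
    datasource: {
        type: "Microsoft.Storage/Blob",
        authenticationMode: "Msi", // managed identity; no account key required
        container: "state",
        dateFormat: "yyyy/MM/dd",
        pathPattern: "{date}/{time}",
        storageAccounts: [{
            accountName: "someAccountName",
        }],
        timeFormat: "HH",
    },
    serialization: {
        encoding: azure_native.streamanalytics.Encoding.UTF8,
        format: azure_native.streamanalytics.JsonOutputSerializationFormat.LineSeparated,
        type: "Json",
    },
    // Flush a batch when either window fills first.
    sizeWindow: 1024,
    timeWindow: "00:02:00",
});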

These omissions are intentional: the goal is to illustrate how each output destination is wired, not provide drop-in streaming modules. See the Stream Analytics Output resource reference for all available configuration options.

Frequently Asked Questions

Configuration & Required Fields
What fields are required when creating an output?
You must specify datasource and serialization on PUT (CreateOrReplace) requests. The datasource defines where data will be written, and serialization defines the output format.
What output destinations are supported?
Stream Analytics supports multiple output types including DocumentDB, Event Hub, Blob Storage, Azure SQL Database, Azure Synapse (Data Warehouse), Azure Data Lake Store, Azure Functions, Azure Table Storage, Power BI, Service Bus Queue, Service Bus Topic, and Gateway Message Bus.
What serialization formats can I use for outputs?
Three formats are supported: Avro, CSV (with configurable encoding and field delimiter), and JSON (with Array or LineSeparated format options).
Immutability & Updates
What properties can't be changed after creating an output?
The jobName, outputName, and resourceGroupName properties are immutable. To change these, you must delete and recreate the output.
What's the etag property used for?
The etag is an opaque string for detecting resource changes between requests. Use it in If-Match or If-None-Match headers for optimistic concurrency control during write operations.
Datasource-Specific Features
How do I configure batching for Azure Functions output?
Set maxBatchCount and maxBatchSize in your AzureFunctionOutputDataSource configuration. For example, maxBatchCount: 100 and maxBatchSize: 256.
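
A minimal TypeScript sketch using the values from this answer (job, app, and key names are placeholders):

import * as azure_native from "@pulumi/azure-native";

const fnOutput = new azure_native.streamanalytics.Output("fn-output", {
    jobName: "someJob",
    outputName: "fnOutput",
    resourceGroupName: "someResourceGroup",
    datasource: {
        type: "Microsoft.AzureFunction",
        functionAppName: "someFunctionApp",
        functionName: "someFunction",
        apiKey: "someApiKey",
        maxBatchCount: 100,
        maxBatchSize: 256,
    },
});
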
How do I organize blob output files by date and time?
Configure pathPattern with placeholders like {date}/{time}, then specify dateFormat (e.g., 'yyyy/MM/dd') and timeFormat (e.g., 'HH') to control the directory structure.
How do I map custom properties to Service Bus message properties?
Use propertyColumns to specify custom property columns and systemPropertyColumns to map system properties like MessageId and PartitionKey to your data columns.
API Versions & Advanced Topics
How do I use a different API version for this resource?
Generate a local SDK package using pulumi package add azure-native streamanalytics [ApiVersion]. The default is 2020-03-01, with 2021-10-01-preview also available.
