azure.loganalytics.DataExportRule

Note: We recommend using Azure Native.
Manages a Log Analytics Data Export Rule.
Example Usage
C#

using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Azure = Pulumi.Azure;

return await Deployment.RunAsync(() =>
{
    var exampleResourceGroup = new Azure.Core.ResourceGroup("exampleResourceGroup", new()
    {
        Location = "West Europe",
    });

    var exampleAnalyticsWorkspace = new Azure.OperationalInsights.AnalyticsWorkspace("exampleAnalyticsWorkspace", new()
    {
        Location = exampleResourceGroup.Location,
        ResourceGroupName = exampleResourceGroup.Name,
        Sku = "PerGB2018",
        RetentionInDays = 30,
    });

    var exampleAccount = new Azure.Storage.Account("exampleAccount", new()
    {
        ResourceGroupName = exampleResourceGroup.Name,
        Location = exampleResourceGroup.Location,
        AccountTier = "Standard",
        AccountReplicationType = "LRS",
    });

    var exampleDataExportRule = new Azure.LogAnalytics.DataExportRule("exampleDataExportRule", new()
    {
        ResourceGroupName = exampleResourceGroup.Name,
        WorkspaceResourceId = exampleAnalyticsWorkspace.Id,
        DestinationResourceId = exampleAccount.Id,
        TableNames = new[]
        {
            "Heartbeat",
        },
        Enabled = true,
    });
});
Go

package main

import (
	"github.com/pulumi/pulumi-azure/sdk/v5/go/azure/core"
	"github.com/pulumi/pulumi-azure/sdk/v5/go/azure/loganalytics"
	"github.com/pulumi/pulumi-azure/sdk/v5/go/azure/operationalinsights"
	"github.com/pulumi/pulumi-azure/sdk/v5/go/azure/storage"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		exampleResourceGroup, err := core.NewResourceGroup(ctx, "exampleResourceGroup", &core.ResourceGroupArgs{
			Location: pulumi.String("West Europe"),
		})
		if err != nil {
			return err
		}
		exampleAnalyticsWorkspace, err := operationalinsights.NewAnalyticsWorkspace(ctx, "exampleAnalyticsWorkspace", &operationalinsights.AnalyticsWorkspaceArgs{
			Location:          exampleResourceGroup.Location,
			ResourceGroupName: exampleResourceGroup.Name,
			Sku:               pulumi.String("PerGB2018"),
			RetentionInDays:   pulumi.Int(30),
		})
		if err != nil {
			return err
		}
		exampleAccount, err := storage.NewAccount(ctx, "exampleAccount", &storage.AccountArgs{
			ResourceGroupName:      exampleResourceGroup.Name,
			Location:               exampleResourceGroup.Location,
			AccountTier:            pulumi.String("Standard"),
			AccountReplicationType: pulumi.String("LRS"),
		})
		if err != nil {
			return err
		}
		_, err = loganalytics.NewDataExportRule(ctx, "exampleDataExportRule", &loganalytics.DataExportRuleArgs{
			ResourceGroupName:     exampleResourceGroup.Name,
			WorkspaceResourceId:   exampleAnalyticsWorkspace.ID(),
			DestinationResourceId: exampleAccount.ID(),
			TableNames: pulumi.StringArray{
				pulumi.String("Heartbeat"),
			},
			Enabled: pulumi.Bool(true),
		})
		if err != nil {
			return err
		}
		return nil
	})
}
Java

package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.azure.core.ResourceGroup;
import com.pulumi.azure.core.ResourceGroupArgs;
import com.pulumi.azure.operationalinsights.AnalyticsWorkspace;
import com.pulumi.azure.operationalinsights.AnalyticsWorkspaceArgs;
import com.pulumi.azure.storage.Account;
import com.pulumi.azure.storage.AccountArgs;
import com.pulumi.azure.loganalytics.DataExportRule;
import com.pulumi.azure.loganalytics.DataExportRuleArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var exampleResourceGroup = new ResourceGroup("exampleResourceGroup", ResourceGroupArgs.builder()
            .location("West Europe")
            .build());

        var exampleAnalyticsWorkspace = new AnalyticsWorkspace("exampleAnalyticsWorkspace", AnalyticsWorkspaceArgs.builder()
            .location(exampleResourceGroup.location())
            .resourceGroupName(exampleResourceGroup.name())
            .sku("PerGB2018")
            .retentionInDays(30)
            .build());

        var exampleAccount = new Account("exampleAccount", AccountArgs.builder()
            .resourceGroupName(exampleResourceGroup.name())
            .location(exampleResourceGroup.location())
            .accountTier("Standard")
            .accountReplicationType("LRS")
            .build());

        var exampleDataExportRule = new DataExportRule("exampleDataExportRule", DataExportRuleArgs.builder()
            .resourceGroupName(exampleResourceGroup.name())
            .workspaceResourceId(exampleAnalyticsWorkspace.id())
            .destinationResourceId(exampleAccount.id())
            .tableNames("Heartbeat")
            .enabled(true)
            .build());
    }
}
Python

import pulumi
import pulumi_azure as azure

example_resource_group = azure.core.ResourceGroup("exampleResourceGroup", location="West Europe")
example_analytics_workspace = azure.operationalinsights.AnalyticsWorkspace("exampleAnalyticsWorkspace",
    location=example_resource_group.location,
    resource_group_name=example_resource_group.name,
    sku="PerGB2018",
    retention_in_days=30)
example_account = azure.storage.Account("exampleAccount",
    resource_group_name=example_resource_group.name,
    location=example_resource_group.location,
    account_tier="Standard",
    account_replication_type="LRS")
example_data_export_rule = azure.loganalytics.DataExportRule("exampleDataExportRule",
    resource_group_name=example_resource_group.name,
    workspace_resource_id=example_analytics_workspace.id,
    destination_resource_id=example_account.id,
    table_names=["Heartbeat"],
    enabled=True)
TypeScript

import * as pulumi from "@pulumi/pulumi";
import * as azure from "@pulumi/azure";

const exampleResourceGroup = new azure.core.ResourceGroup("exampleResourceGroup", {location: "West Europe"});
const exampleAnalyticsWorkspace = new azure.operationalinsights.AnalyticsWorkspace("exampleAnalyticsWorkspace", {
    location: exampleResourceGroup.location,
    resourceGroupName: exampleResourceGroup.name,
    sku: "PerGB2018",
    retentionInDays: 30,
});
const exampleAccount = new azure.storage.Account("exampleAccount", {
    resourceGroupName: exampleResourceGroup.name,
    location: exampleResourceGroup.location,
    accountTier: "Standard",
    accountReplicationType: "LRS",
});
const exampleDataExportRule = new azure.loganalytics.DataExportRule("exampleDataExportRule", {
    resourceGroupName: exampleResourceGroup.name,
    workspaceResourceId: exampleAnalyticsWorkspace.id,
    destinationResourceId: exampleAccount.id,
    tableNames: ["Heartbeat"],
    enabled: true,
});
YAML

resources:
  exampleResourceGroup:
    type: azure:core:ResourceGroup
    properties:
      location: West Europe
  exampleAnalyticsWorkspace:
    type: azure:operationalinsights:AnalyticsWorkspace
    properties:
      location: ${exampleResourceGroup.location}
      resourceGroupName: ${exampleResourceGroup.name}
      sku: PerGB2018
      retentionInDays: 30
  exampleAccount:
    type: azure:storage:Account
    properties:
      resourceGroupName: ${exampleResourceGroup.name}
      location: ${exampleResourceGroup.location}
      accountTier: Standard
      accountReplicationType: LRS
  exampleDataExportRule:
    type: azure:loganalytics:DataExportRule
    properties:
      resourceGroupName: ${exampleResourceGroup.name}
      workspaceResourceId: ${exampleAnalyticsWorkspace.id}
      destinationResourceId: ${exampleAccount.id}
      tableNames:
        - Heartbeat
      enabled: true
Create DataExportRule Resource
TypeScript

new DataExportRule(name: string, args: DataExportRuleArgs, opts?: CustomResourceOptions);

Python

@overload
def DataExportRule(resource_name: str,
                   opts: Optional[ResourceOptions] = None,
                   destination_resource_id: Optional[str] = None,
                   enabled: Optional[bool] = None,
                   name: Optional[str] = None,
                   resource_group_name: Optional[str] = None,
                   table_names: Optional[Sequence[str]] = None,
                   workspace_resource_id: Optional[str] = None)
@overload
def DataExportRule(resource_name: str,
                   args: DataExportRuleArgs,
                   opts: Optional[ResourceOptions] = None)

Go

func NewDataExportRule(ctx *Context, name string, args DataExportRuleArgs, opts ...ResourceOption) (*DataExportRule, error)

C#

public DataExportRule(string name, DataExportRuleArgs args, CustomResourceOptions? opts = null)

Java

public DataExportRule(String name, DataExportRuleArgs args)
public DataExportRule(String name, DataExportRuleArgs args, CustomResourceOptions options)

YAML

type: azure:loganalytics:DataExportRule
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
Constructor parameters (parameter names vary slightly per SDK):
- name (resource_name in Python) - The unique name of the resource.
- args (DataExportRuleArgs) - The arguments to resource properties.
- opts - A bag of options to control the resource's behavior (CustomResourceOptions, ResourceOptions, or ResourceOption, depending on the SDK).
- ctx (Go only) - Context object for the current deployment.
DataExportRule Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
The DataExportRule resource accepts the following input properties:
Property names are shown here in camelCase; each SDK follows its own naming convention (for example, DestinationResourceId in .NET and Go, destination_resource_id in Python).

- destinationResourceId (string) - The destination resource ID. It should be a storage account, an Event Hub namespace, or an Event Hub. If the destination is an Event Hub namespace, an event hub is created for each table automatically.
- resourceGroupName (string) - The name of the Resource Group where the Log Analytics Data Export should exist. Changing this forces a new Log Analytics Data Export Rule to be created.
- tableNames (list of string) - A list of table names to export to the destination resource, for example ["Heartbeat", "SecurityEvent"].
- workspaceResourceId (string) - The resource ID of the workspace. Changing this forces a new Log Analytics Data Export Rule to be created.
- enabled (bool) - Is this Log Analytics Data Export Rule enabled? Possible values include true or false. Defaults to false.
- name (string) - The name of the Log Analytics Data Export Rule. Changing this forces a new Log Analytics Data Export Rule to be created.
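For instance, to export several tables to an Event Hub namespace rather than a storage account, a rule like the following could be used. This is a minimal TypeScript sketch that reuses exampleResourceGroup and exampleAnalyticsWorkspace from the Example Usage above; the Event Hub namespace resource and its settings are illustrative assumptions.

// Assumes exampleResourceGroup and exampleAnalyticsWorkspace exist as in the Example Usage above.
const exampleNamespace = new azure.eventhub.EventHubNamespace("exampleNamespace", {
    location: exampleResourceGroup.location,
    resourceGroupName: exampleResourceGroup.name,
    sku: "Standard",
});

// With an Event Hub namespace destination, an event hub is created automatically for each exported table.
const eventHubExportRule = new azure.loganalytics.DataExportRule("eventHubExportRule", {
    resourceGroupName: exampleResourceGroup.name,
    workspaceResourceId: exampleAnalyticsWorkspace.id,
    destinationResourceId: exampleNamespace.id,
    tableNames: ["Heartbeat", "SecurityEvent"],
    enabled: true,
});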
Outputs
All input properties are implicitly available as output properties. Additionally, the DataExportRule resource produces the following output properties:
- exportRuleId (string) - The ID of the created Data Export Rule.
- id (string) - The provider-assigned unique ID for this managed resource.
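These outputs can be read like any other output value. For example, a TypeScript program could export the rule ID (assuming the exampleDataExportRule resource from the Example Usage above):

export const dataExportRuleId = exampleDataExportRule.exportRuleId;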
Look up Existing DataExportRule Resource
Get an existing DataExportRule resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
TypeScript

public static get(name: string, id: Input<ID>, state?: DataExportRuleState, opts?: CustomResourceOptions): DataExportRule

Python

@staticmethod
def get(resource_name: str,
        id: str,
        opts: Optional[ResourceOptions] = None,
        destination_resource_id: Optional[str] = None,
        enabled: Optional[bool] = None,
        export_rule_id: Optional[str] = None,
        name: Optional[str] = None,
        resource_group_name: Optional[str] = None,
        table_names: Optional[Sequence[str]] = None,
        workspace_resource_id: Optional[str] = None) -> DataExportRule

Go

func GetDataExportRule(ctx *Context, name string, id IDInput, state *DataExportRuleState, opts ...ResourceOption) (*DataExportRule, error)

C#

public static DataExportRule Get(string name, Input<string> id, DataExportRuleState? state, CustomResourceOptions? opts = null)

Java

public static DataExportRule get(String name, Output<String> id, DataExportRuleState state, CustomResourceOptions options)

YAML
Resource lookup is not supported in YAML
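As an illustration, an existing rule can be looked up by its full Azure resource ID. This is a minimal TypeScript sketch (assuming the @pulumi/azure import shown in the examples above); the subscription, resource group, workspace, and rule names in the ID are placeholders.

// Look up an existing Data Export Rule by its Azure resource ID.
const existingRule = azure.loganalytics.DataExportRule.get("existingDataExportRule",
    "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.OperationalInsights/workspaces/workspace1/dataExports/dataExport1");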
Lookup parameters (parameter names vary slightly per SDK):
- name (resource_name in Python) - The unique name of the resulting resource.
- id - The unique provider ID of the resource to look up.
- state - Any extra arguments used during the lookup.
- opts - A bag of options that control this resource's behavior.
The following state properties are supported (shown in camelCase; each SDK follows its own naming convention):

- destinationResourceId (string) - The destination resource ID. It should be a storage account, an Event Hub namespace, or an Event Hub. If the destination is an Event Hub namespace, an event hub is created for each table automatically.
- enabled (bool) - Is this Log Analytics Data Export Rule enabled? Possible values include true or false. Defaults to false.
- exportRuleId (string) - The ID of the created Data Export Rule.
- name (string) - The name of the Log Analytics Data Export Rule. Changing this forces a new Log Analytics Data Export Rule to be created.
- resourceGroupName (string) - The name of the Resource Group where the Log Analytics Data Export should exist. Changing this forces a new Log Analytics Data Export Rule to be created.
- tableNames (list of string) - A list of table names to export to the destination resource, for example ["Heartbeat", "SecurityEvent"].
- workspaceResourceId (string) - The resource ID of the workspace. Changing this forces a new Log Analytics Data Export Rule to be created.
Import
A Log Analytics Data Export Rule can be imported using the resource id, e.g.
$ pulumi import azure:loganalytics/dataExportRule:DataExportRule example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.OperationalInsights/workspaces/workspace1/dataExports/dataExport1
Package Details
- Repository: Azure Classic pulumi/pulumi-azure
- License: Apache-2.0
- Notes: This Pulumi package is based on the azurerm Terraform Provider.