1. Packages
  2. Azure Classic
  3. API Docs
  4. hdinsight
  5. SparkCluster

We recommend using Azure Native.

Viewing docs for Azure v4.42.0 (Older version)
published on Monday, Mar 9, 2026 by Pulumi
azure logo

We recommend using Azure Native.

Viewing docs for Azure v4.42.0 (Older version)
published on Monday, Mar 9, 2026 by Pulumi

    Manages an HDInsight Spark Cluster.

    Example Usage

    using Pulumi;
    using Azure = Pulumi.Azure;
    
    // Example stack: provisions an HDInsight Spark Cluster together with the
    // supporting resource group, storage account, and private blob container
    // that backs the cluster's default filesystem.
    class MyStack : Stack
    {
        public MyStack()
        {
            // Resource group that holds every resource in this example.
            var exampleResourceGroup = new Azure.Core.ResourceGroup("exampleResourceGroup", new Azure.Core.ResourceGroupArgs
            {
                Location = "West Europe",
            });
            // Storage account used as the cluster's primary storage.
            var exampleAccount = new Azure.Storage.Account("exampleAccount", new Azure.Storage.AccountArgs
            {
                ResourceGroupName = exampleResourceGroup.Name,
                Location = exampleResourceGroup.Location,
                AccountTier = "Standard",
                AccountReplicationType = "LRS",
            });
            // Private container inside the account; the cluster stores its data here.
            var exampleContainer = new Azure.Storage.Container("exampleContainer", new Azure.Storage.ContainerArgs
            {
                StorageAccountName = exampleAccount.Name,
                ContainerAccessType = "private",
            });
            // HDInsight 3.6 cluster running Spark 2.3, wired to the container above.
            var exampleSparkCluster = new Azure.HDInsight.SparkCluster("exampleSparkCluster", new Azure.HDInsight.SparkClusterArgs
            {
                ResourceGroupName = exampleResourceGroup.Name,
                Location = exampleResourceGroup.Location,
                ClusterVersion = "3.6",
                Tier = "Standard",
                ComponentVersion = new Azure.HDInsight.Inputs.SparkClusterComponentVersionArgs
                {
                    Spark = "2.3",
                },
                // Credentials for the HTTPS gateway (Ambari web endpoint).
                // NOTE: example-only placeholder credentials — do not reuse in real deployments.
                Gateway = new Azure.HDInsight.Inputs.SparkClusterGatewayArgs
                {
                    Enabled = true,
                    Username = "acctestusrgw",
                    Password = "Password123!",
                },
                // Attach the container as the cluster's default storage account.
                StorageAccounts = 
                {
                    new Azure.HDInsight.Inputs.SparkClusterStorageAccountArgs
                    {
                        StorageContainerId = exampleContainer.Id,
                        StorageAccountKey = exampleAccount.PrimaryAccessKey,
                        IsDefault = true,
                    },
                },
                // Node layout: head, worker (3 instances), and zookeeper roles.
                Roles = new Azure.HDInsight.Inputs.SparkClusterRolesArgs
                {
                    HeadNode = new Azure.HDInsight.Inputs.SparkClusterRolesHeadNodeArgs
                    {
                        VmSize = "Standard_A3",
                        Username = "acctestusrvm",
                        Password = "AccTestvdSC4daf986!",
                    },
                    WorkerNode = new Azure.HDInsight.Inputs.SparkClusterRolesWorkerNodeArgs
                    {
                        VmSize = "Standard_A3",
                        Username = "acctestusrvm",
                        Password = "AccTestvdSC4daf986!",
                        TargetInstanceCount = 3,
                    },
                    ZookeeperNode = new Azure.HDInsight.Inputs.SparkClusterRolesZookeeperNodeArgs
                    {
                        VmSize = "Medium",
                        Username = "acctestusrvm",
                        Password = "AccTestvdSC4daf986!",
                    },
                },
            });
        }
    
    }
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-azure/sdk/v4/go/azure/core"
    	"github.com/pulumi/pulumi-azure/sdk/v4/go/azure/hdinsight"
    	"github.com/pulumi/pulumi-azure/sdk/v4/go/azure/storage"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    // main provisions an HDInsight Spark Cluster along with its supporting
    // resource group, storage account, and private blob container.
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		// Resource group that holds every resource in this example.
    		exampleResourceGroup, err := core.NewResourceGroup(ctx, "exampleResourceGroup", &core.ResourceGroupArgs{
    			Location: pulumi.String("West Europe"),
    		})
    		if err != nil {
    			return err
    		}
    		// Storage account used as the cluster's primary storage.
    		exampleAccount, err := storage.NewAccount(ctx, "exampleAccount", &storage.AccountArgs{
    			ResourceGroupName:      exampleResourceGroup.Name,
    			Location:               exampleResourceGroup.Location,
    			AccountTier:            pulumi.String("Standard"),
    			AccountReplicationType: pulumi.String("LRS"),
    		})
    		if err != nil {
    			return err
    		}
    		// Private container inside the account; the cluster stores its data here.
    		exampleContainer, err := storage.NewContainer(ctx, "exampleContainer", &storage.ContainerArgs{
    			StorageAccountName:  exampleAccount.Name,
    			ContainerAccessType: pulumi.String("private"),
    		})
    		if err != nil {
    			return err
    		}
    		// HDInsight 3.6 cluster running Spark 2.3, wired to the container above.
    		_, err = hdinsight.NewSparkCluster(ctx, "exampleSparkCluster", &hdinsight.SparkClusterArgs{
    			ResourceGroupName: exampleResourceGroup.Name,
    			Location:          exampleResourceGroup.Location,
    			ClusterVersion:    pulumi.String("3.6"),
    			Tier:              pulumi.String("Standard"),
    			ComponentVersion: &hdinsight.SparkClusterComponentVersionArgs{
    				Spark: pulumi.String("2.3"),
    			},
    			// Credentials for the HTTPS gateway (Ambari web endpoint).
    			// NOTE: example-only placeholder credentials — do not reuse in real deployments.
    			Gateway: &hdinsight.SparkClusterGatewayArgs{
    				Enabled:  pulumi.Bool(true),
    				Username: pulumi.String("acctestusrgw"),
    				Password: pulumi.String("Password123!"),
    			},
    			// Attach the container as the cluster's default storage account.
    			StorageAccounts: hdinsight.SparkClusterStorageAccountArray{
    				&hdinsight.SparkClusterStorageAccountArgs{
    					StorageContainerId: exampleContainer.ID(),
    					StorageAccountKey:  exampleAccount.PrimaryAccessKey,
    					IsDefault:          pulumi.Bool(true),
    				},
    			},
    			// Node layout: head, worker (3 instances), and zookeeper roles.
    			Roles: &hdinsight.SparkClusterRolesArgs{
    				HeadNode: &hdinsight.SparkClusterRolesHeadNodeArgs{
    					VmSize:   pulumi.String("Standard_A3"),
    					Username: pulumi.String("acctestusrvm"),
    					Password: pulumi.String("AccTestvdSC4daf986!"),
    				},
    				WorkerNode: &hdinsight.SparkClusterRolesWorkerNodeArgs{
    					VmSize:              pulumi.String("Standard_A3"),
    					Username:            pulumi.String("acctestusrvm"),
    					Password:            pulumi.String("AccTestvdSC4daf986!"),
    					TargetInstanceCount: pulumi.Int(3),
    				},
    				ZookeeperNode: &hdinsight.SparkClusterRolesZookeeperNodeArgs{
    					VmSize:   pulumi.String("Medium"),
    					Username: pulumi.String("acctestusrvm"),
    					Password: pulumi.String("AccTestvdSC4daf986!"),
    				},
    			},
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    

    Example coming soon!

    import * as pulumi from "@pulumi/pulumi";
    import * as azure from "@pulumi/azure";
    
    // Example: provision an HDInsight Spark Cluster with its supporting
    // resource group, storage account, and private blob container.
    
    // Resource group that holds every resource in this example.
    const exampleResourceGroup = new azure.core.ResourceGroup("exampleResourceGroup", {location: "West Europe"});
    // Storage account used as the cluster's primary storage.
    const exampleAccount = new azure.storage.Account("exampleAccount", {
        resourceGroupName: exampleResourceGroup.name,
        location: exampleResourceGroup.location,
        accountTier: "Standard",
        accountReplicationType: "LRS",
    });
    // Private container inside the account; the cluster stores its data here.
    const exampleContainer = new azure.storage.Container("exampleContainer", {
        storageAccountName: exampleAccount.name,
        containerAccessType: "private",
    });
    // HDInsight 3.6 cluster running Spark 2.3, wired to the container above.
    const exampleSparkCluster = new azure.hdinsight.SparkCluster("exampleSparkCluster", {
        resourceGroupName: exampleResourceGroup.name,
        location: exampleResourceGroup.location,
        clusterVersion: "3.6",
        tier: "Standard",
        componentVersion: {
            spark: "2.3",
        },
        // Credentials for the HTTPS gateway (Ambari web endpoint).
        // NOTE: example-only placeholder credentials — do not reuse in real deployments.
        gateway: {
            enabled: true,
            username: "acctestusrgw",
            password: "Password123!",
        },
        // Attach the container as the cluster's default storage account.
        storageAccounts: [{
            storageContainerId: exampleContainer.id,
            storageAccountKey: exampleAccount.primaryAccessKey,
            isDefault: true,
        }],
        // Node layout: head, worker (3 instances), and zookeeper roles.
        roles: {
            headNode: {
                vmSize: "Standard_A3",
                username: "acctestusrvm",
                password: "AccTestvdSC4daf986!",
            },
            workerNode: {
                vmSize: "Standard_A3",
                username: "acctestusrvm",
                password: "AccTestvdSC4daf986!",
                targetInstanceCount: 3,
            },
            zookeeperNode: {
                vmSize: "Medium",
                username: "acctestusrvm",
                password: "AccTestvdSC4daf986!",
            },
        },
    });
    
    import pulumi
    import pulumi_azure as azure
    
    # Example: provision an HDInsight Spark Cluster with its supporting
    # resource group, storage account, and private blob container.
    
    # Resource group that holds every resource in this example.
    example_resource_group = azure.core.ResourceGroup("exampleResourceGroup", location="West Europe")
    # Storage account used as the cluster's primary storage.
    example_account = azure.storage.Account("exampleAccount",
        resource_group_name=example_resource_group.name,
        location=example_resource_group.location,
        account_tier="Standard",
        account_replication_type="LRS")
    # Private container inside the account; the cluster stores its data here.
    example_container = azure.storage.Container("exampleContainer",
        storage_account_name=example_account.name,
        container_access_type="private")
    # HDInsight 3.6 cluster running Spark 2.3, wired to the container above.
    example_spark_cluster = azure.hdinsight.SparkCluster("exampleSparkCluster",
        resource_group_name=example_resource_group.name,
        location=example_resource_group.location,
        cluster_version="3.6",
        tier="Standard",
        component_version=azure.hdinsight.SparkClusterComponentVersionArgs(
            spark="2.3",
        ),
        # Credentials for the HTTPS gateway (Ambari web endpoint).
        # NOTE: example-only placeholder credentials — do not reuse in real deployments.
        gateway=azure.hdinsight.SparkClusterGatewayArgs(
            enabled=True,
            username="acctestusrgw",
            password="Password123!",
        ),
        # Attach the container as the cluster's default storage account.
        storage_accounts=[azure.hdinsight.SparkClusterStorageAccountArgs(
            storage_container_id=example_container.id,
            storage_account_key=example_account.primary_access_key,
            is_default=True,
        )],
        # Node layout: head, worker (3 instances), and zookeeper roles.
        roles=azure.hdinsight.SparkClusterRolesArgs(
            head_node=azure.hdinsight.SparkClusterRolesHeadNodeArgs(
                vm_size="Standard_A3",
                username="acctestusrvm",
                password="AccTestvdSC4daf986!",
            ),
            worker_node=azure.hdinsight.SparkClusterRolesWorkerNodeArgs(
                vm_size="Standard_A3",
                username="acctestusrvm",
                password="AccTestvdSC4daf986!",
                target_instance_count=3,
            ),
            zookeeper_node=azure.hdinsight.SparkClusterRolesZookeeperNodeArgs(
                vm_size="Medium",
                username="acctestusrvm",
                password="AccTestvdSC4daf986!",
            ),
        ))
    

    Example coming soon!

    Create SparkCluster Resource

    Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.

    Constructor syntax

    new SparkCluster(name: string, args: SparkClusterArgs, opts?: CustomResourceOptions);
    @overload
    def SparkCluster(resource_name: str,
                     args: SparkClusterArgs,
                     opts: Optional[ResourceOptions] = None)
    
    @overload
    def SparkCluster(resource_name: str,
                     opts: Optional[ResourceOptions] = None,
                     cluster_version: Optional[str] = None,
                     component_version: Optional[SparkClusterComponentVersionArgs] = None,
                     tier: Optional[str] = None,
                     gateway: Optional[SparkClusterGatewayArgs] = None,
                     roles: Optional[SparkClusterRolesArgs] = None,
                     resource_group_name: Optional[str] = None,
                     network: Optional[SparkClusterNetworkArgs] = None,
                     name: Optional[str] = None,
                     monitor: Optional[SparkClusterMonitorArgs] = None,
                     metastores: Optional[SparkClusterMetastoresArgs] = None,
                     location: Optional[str] = None,
                     security_profile: Optional[SparkClusterSecurityProfileArgs] = None,
                     storage_account_gen2: Optional[SparkClusterStorageAccountGen2Args] = None,
                     storage_accounts: Optional[Sequence[SparkClusterStorageAccountArgs]] = None,
                     tags: Optional[Mapping[str, str]] = None,
                     encryption_in_transit_enabled: Optional[bool] = None,
                     tls_min_version: Optional[str] = None)
    func NewSparkCluster(ctx *Context, name string, args SparkClusterArgs, opts ...ResourceOption) (*SparkCluster, error)
    public SparkCluster(string name, SparkClusterArgs args, CustomResourceOptions? opts = null)
    public SparkCluster(String name, SparkClusterArgs args)
    public SparkCluster(String name, SparkClusterArgs args, CustomResourceOptions options)
    
    type: azure:hdinsight:SparkCluster
    properties: # The arguments to resource properties.
    options: # Bag of options to control resource's behavior.
    
    

    Parameters

    name string
    The unique name of the resource.
    args SparkClusterArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    resource_name str
    The unique name of the resource.
    args SparkClusterArgs
    The arguments to resource properties.
    opts ResourceOptions
    Bag of options to control resource's behavior.
    ctx Context
    Context object for the current deployment.
    name string
    The unique name of the resource.
    args SparkClusterArgs
    The arguments to resource properties.
    opts ResourceOption
    Bag of options to control resource's behavior.
    name string
    The unique name of the resource.
    args SparkClusterArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    name String
    The unique name of the resource.
    args SparkClusterArgs
    The arguments to resource properties.
    options CustomResourceOptions
    Bag of options to control resource's behavior.

    Constructor example

    The following reference example uses placeholder values for all input properties.

    var sparkClusterResource = new Azure.HDInsight.SparkCluster("sparkClusterResource", new()
    {
        ClusterVersion = "string",
        ComponentVersion = new Azure.HDInsight.Inputs.SparkClusterComponentVersionArgs
        {
            Spark = "string",
        },
        Tier = "string",
        Gateway = new Azure.HDInsight.Inputs.SparkClusterGatewayArgs
        {
            Password = "string",
            Username = "string",
        },
        Roles = new Azure.HDInsight.Inputs.SparkClusterRolesArgs
        {
            HeadNode = new Azure.HDInsight.Inputs.SparkClusterRolesHeadNodeArgs
            {
                Username = "string",
                VmSize = "string",
                Password = "string",
                SshKeys = new[]
                {
                    "string",
                },
                SubnetId = "string",
                VirtualNetworkId = "string",
            },
            WorkerNode = new Azure.HDInsight.Inputs.SparkClusterRolesWorkerNodeArgs
            {
                TargetInstanceCount = 0,
                Username = "string",
                VmSize = "string",
                Autoscale = new Azure.HDInsight.Inputs.SparkClusterRolesWorkerNodeAutoscaleArgs
                {
                    Capacity = new Azure.HDInsight.Inputs.SparkClusterRolesWorkerNodeAutoscaleCapacityArgs
                    {
                        MaxInstanceCount = 0,
                        MinInstanceCount = 0,
                    },
                    Recurrence = new Azure.HDInsight.Inputs.SparkClusterRolesWorkerNodeAutoscaleRecurrenceArgs
                    {
                        Schedules = new[]
                        {
                            new Azure.HDInsight.Inputs.SparkClusterRolesWorkerNodeAutoscaleRecurrenceScheduleArgs
                            {
                                Days = new[]
                                {
                                    "string",
                                },
                                TargetInstanceCount = 0,
                                Time = "string",
                            },
                        },
                        Timezone = "string",
                    },
                },
                Password = "string",
                SshKeys = new[]
                {
                    "string",
                },
                SubnetId = "string",
                VirtualNetworkId = "string",
            },
            ZookeeperNode = new Azure.HDInsight.Inputs.SparkClusterRolesZookeeperNodeArgs
            {
                Username = "string",
                VmSize = "string",
                Password = "string",
                SshKeys = new[]
                {
                    "string",
                },
                SubnetId = "string",
                VirtualNetworkId = "string",
            },
        },
        ResourceGroupName = "string",
        Network = new Azure.HDInsight.Inputs.SparkClusterNetworkArgs
        {
            ConnectionDirection = "string",
            PrivateLinkEnabled = false,
        },
        Name = "string",
        Monitor = new Azure.HDInsight.Inputs.SparkClusterMonitorArgs
        {
            LogAnalyticsWorkspaceId = "string",
            PrimaryKey = "string",
        },
        Metastores = new Azure.HDInsight.Inputs.SparkClusterMetastoresArgs
        {
            Ambari = new Azure.HDInsight.Inputs.SparkClusterMetastoresAmbariArgs
            {
                DatabaseName = "string",
                Password = "string",
                Server = "string",
                Username = "string",
            },
            Hive = new Azure.HDInsight.Inputs.SparkClusterMetastoresHiveArgs
            {
                DatabaseName = "string",
                Password = "string",
                Server = "string",
                Username = "string",
            },
            Oozie = new Azure.HDInsight.Inputs.SparkClusterMetastoresOozieArgs
            {
                DatabaseName = "string",
                Password = "string",
                Server = "string",
                Username = "string",
            },
        },
        Location = "string",
        SecurityProfile = new Azure.HDInsight.Inputs.SparkClusterSecurityProfileArgs
        {
            AaddsResourceId = "string",
            DomainName = "string",
            DomainUserPassword = "string",
            DomainUsername = "string",
            LdapsUrls = new[]
            {
                "string",
            },
            MsiResourceId = "string",
            ClusterUsersGroupDns = new[]
            {
                "string",
            },
        },
        StorageAccountGen2 = new Azure.HDInsight.Inputs.SparkClusterStorageAccountGen2Args
        {
            FilesystemId = "string",
            IsDefault = false,
            ManagedIdentityResourceId = "string",
            StorageResourceId = "string",
        },
        StorageAccounts = new[]
        {
            new Azure.HDInsight.Inputs.SparkClusterStorageAccountArgs
            {
                IsDefault = false,
                StorageAccountKey = "string",
                StorageContainerId = "string",
                StorageResourceId = "string",
            },
        },
        Tags = 
        {
            { "string", "string" },
        },
        EncryptionInTransitEnabled = false,
        TlsMinVersion = "string",
    });
    
    example, err := hdinsight.NewSparkCluster(ctx, "sparkClusterResource", &hdinsight.SparkClusterArgs{
    	ClusterVersion: pulumi.String("string"),
    	ComponentVersion: &hdinsight.SparkClusterComponentVersionArgs{
    		Spark: pulumi.String("string"),
    	},
    	Tier: pulumi.String("string"),
    	Gateway: &hdinsight.SparkClusterGatewayArgs{
    		Password: pulumi.String("string"),
    		Username: pulumi.String("string"),
    	},
    	Roles: &hdinsight.SparkClusterRolesArgs{
    		HeadNode: &hdinsight.SparkClusterRolesHeadNodeArgs{
    			Username: pulumi.String("string"),
    			VmSize:   pulumi.String("string"),
    			Password: pulumi.String("string"),
    			SshKeys: pulumi.StringArray{
    				pulumi.String("string"),
    			},
    			SubnetId:         pulumi.String("string"),
    			VirtualNetworkId: pulumi.String("string"),
    		},
    		WorkerNode: &hdinsight.SparkClusterRolesWorkerNodeArgs{
    			TargetInstanceCount: pulumi.Int(0),
    			Username:            pulumi.String("string"),
    			VmSize:              pulumi.String("string"),
    			Autoscale: &hdinsight.SparkClusterRolesWorkerNodeAutoscaleArgs{
    				Capacity: &hdinsight.SparkClusterRolesWorkerNodeAutoscaleCapacityArgs{
    					MaxInstanceCount: pulumi.Int(0),
    					MinInstanceCount: pulumi.Int(0),
    				},
    				Recurrence: &hdinsight.SparkClusterRolesWorkerNodeAutoscaleRecurrenceArgs{
    					Schedules: hdinsight.SparkClusterRolesWorkerNodeAutoscaleRecurrenceScheduleArray{
    						&hdinsight.SparkClusterRolesWorkerNodeAutoscaleRecurrenceScheduleArgs{
    							Days: pulumi.StringArray{
    								pulumi.String("string"),
    							},
    							TargetInstanceCount: pulumi.Int(0),
    							Time:                pulumi.String("string"),
    						},
    					},
    					Timezone: pulumi.String("string"),
    				},
    			},
    			Password: pulumi.String("string"),
    			SshKeys: pulumi.StringArray{
    				pulumi.String("string"),
    			},
    			SubnetId:         pulumi.String("string"),
    			VirtualNetworkId: pulumi.String("string"),
    		},
    		ZookeeperNode: &hdinsight.SparkClusterRolesZookeeperNodeArgs{
    			Username: pulumi.String("string"),
    			VmSize:   pulumi.String("string"),
    			Password: pulumi.String("string"),
    			SshKeys: pulumi.StringArray{
    				pulumi.String("string"),
    			},
    			SubnetId:         pulumi.String("string"),
    			VirtualNetworkId: pulumi.String("string"),
    		},
    	},
    	ResourceGroupName: pulumi.String("string"),
    	Network: &hdinsight.SparkClusterNetworkArgs{
    		ConnectionDirection: pulumi.String("string"),
    		PrivateLinkEnabled:  pulumi.Bool(false),
    	},
    	Name: pulumi.String("string"),
    	Monitor: &hdinsight.SparkClusterMonitorArgs{
    		LogAnalyticsWorkspaceId: pulumi.String("string"),
    		PrimaryKey:              pulumi.String("string"),
    	},
    	Metastores: &hdinsight.SparkClusterMetastoresArgs{
    		Ambari: &hdinsight.SparkClusterMetastoresAmbariArgs{
    			DatabaseName: pulumi.String("string"),
    			Password:     pulumi.String("string"),
    			Server:       pulumi.String("string"),
    			Username:     pulumi.String("string"),
    		},
    		Hive: &hdinsight.SparkClusterMetastoresHiveArgs{
    			DatabaseName: pulumi.String("string"),
    			Password:     pulumi.String("string"),
    			Server:       pulumi.String("string"),
    			Username:     pulumi.String("string"),
    		},
    		Oozie: &hdinsight.SparkClusterMetastoresOozieArgs{
    			DatabaseName: pulumi.String("string"),
    			Password:     pulumi.String("string"),
    			Server:       pulumi.String("string"),
    			Username:     pulumi.String("string"),
    		},
    	},
    	Location: pulumi.String("string"),
    	SecurityProfile: &hdinsight.SparkClusterSecurityProfileArgs{
    		AaddsResourceId:    pulumi.String("string"),
    		DomainName:         pulumi.String("string"),
    		DomainUserPassword: pulumi.String("string"),
    		DomainUsername:     pulumi.String("string"),
    		LdapsUrls: pulumi.StringArray{
    			pulumi.String("string"),
    		},
    		MsiResourceId: pulumi.String("string"),
    		ClusterUsersGroupDns: pulumi.StringArray{
    			pulumi.String("string"),
    		},
    	},
    	StorageAccountGen2: &hdinsight.SparkClusterStorageAccountGen2Args{
    		FilesystemId:              pulumi.String("string"),
    		IsDefault:                 pulumi.Bool(false),
    		ManagedIdentityResourceId: pulumi.String("string"),
    		StorageResourceId:         pulumi.String("string"),
    	},
    	StorageAccounts: hdinsight.SparkClusterStorageAccountArray{
    		&hdinsight.SparkClusterStorageAccountArgs{
    			IsDefault:          pulumi.Bool(false),
    			StorageAccountKey:  pulumi.String("string"),
    			StorageContainerId: pulumi.String("string"),
    			StorageResourceId:  pulumi.String("string"),
    		},
    	},
    	Tags: pulumi.StringMap{
    		"string": pulumi.String("string"),
    	},
    	EncryptionInTransitEnabled: pulumi.Bool(false),
    	TlsMinVersion:              pulumi.String("string"),
    })
    
    var sparkClusterResource = new SparkCluster("sparkClusterResource", SparkClusterArgs.builder()
        .clusterVersion("string")
        .componentVersion(SparkClusterComponentVersionArgs.builder()
            .spark("string")
            .build())
        .tier("string")
        .gateway(SparkClusterGatewayArgs.builder()
            .password("string")
            .username("string")
            .build())
        .roles(SparkClusterRolesArgs.builder()
            .headNode(SparkClusterRolesHeadNodeArgs.builder()
                .username("string")
                .vmSize("string")
                .password("string")
                .sshKeys("string")
                .subnetId("string")
                .virtualNetworkId("string")
                .build())
            .workerNode(SparkClusterRolesWorkerNodeArgs.builder()
                .targetInstanceCount(0)
                .username("string")
                .vmSize("string")
                .autoscale(SparkClusterRolesWorkerNodeAutoscaleArgs.builder()
                    .capacity(SparkClusterRolesWorkerNodeAutoscaleCapacityArgs.builder()
                        .maxInstanceCount(0)
                        .minInstanceCount(0)
                        .build())
                    .recurrence(SparkClusterRolesWorkerNodeAutoscaleRecurrenceArgs.builder()
                        .schedules(SparkClusterRolesWorkerNodeAutoscaleRecurrenceScheduleArgs.builder()
                            .days("string")
                            .targetInstanceCount(0)
                            .time("string")
                            .build())
                        .timezone("string")
                        .build())
                    .build())
                .password("string")
                .sshKeys("string")
                .subnetId("string")
                .virtualNetworkId("string")
                .build())
            .zookeeperNode(SparkClusterRolesZookeeperNodeArgs.builder()
                .username("string")
                .vmSize("string")
                .password("string")
                .sshKeys("string")
                .subnetId("string")
                .virtualNetworkId("string")
                .build())
            .build())
        .resourceGroupName("string")
        .network(SparkClusterNetworkArgs.builder()
            .connectionDirection("string")
            .privateLinkEnabled(false)
            .build())
        .name("string")
        .monitor(SparkClusterMonitorArgs.builder()
            .logAnalyticsWorkspaceId("string")
            .primaryKey("string")
            .build())
        .metastores(SparkClusterMetastoresArgs.builder()
            .ambari(SparkClusterMetastoresAmbariArgs.builder()
                .databaseName("string")
                .password("string")
                .server("string")
                .username("string")
                .build())
            .hive(SparkClusterMetastoresHiveArgs.builder()
                .databaseName("string")
                .password("string")
                .server("string")
                .username("string")
                .build())
            .oozie(SparkClusterMetastoresOozieArgs.builder()
                .databaseName("string")
                .password("string")
                .server("string")
                .username("string")
                .build())
            .build())
        .location("string")
        .securityProfile(SparkClusterSecurityProfileArgs.builder()
            .aaddsResourceId("string")
            .domainName("string")
            .domainUserPassword("string")
            .domainUsername("string")
            .ldapsUrls("string")
            .msiResourceId("string")
            .clusterUsersGroupDns("string")
            .build())
        .storageAccountGen2(SparkClusterStorageAccountGen2Args.builder()
            .filesystemId("string")
            .isDefault(false)
            .managedIdentityResourceId("string")
            .storageResourceId("string")
            .build())
        .storageAccounts(SparkClusterStorageAccountArgs.builder()
            .isDefault(false)
            .storageAccountKey("string")
            .storageContainerId("string")
            .storageResourceId("string")
            .build())
        .tags(Map.of("string", "string"))
        .encryptionInTransitEnabled(false)
        .tlsMinVersion("string")
        .build());
    
    spark_cluster_resource = azure.hdinsight.SparkCluster("sparkClusterResource",
        cluster_version="string",
        component_version={
            "spark": "string",
        },
        tier="string",
        gateway={
            "password": "string",
            "username": "string",
        },
        roles={
            "head_node": {
                "username": "string",
                "vm_size": "string",
                "password": "string",
                "ssh_keys": ["string"],
                "subnet_id": "string",
                "virtual_network_id": "string",
            },
            "worker_node": {
                "target_instance_count": 0,
                "username": "string",
                "vm_size": "string",
                "autoscale": {
                    "capacity": {
                        "max_instance_count": 0,
                        "min_instance_count": 0,
                    },
                    "recurrence": {
                        "schedules": [{
                            "days": ["string"],
                            "target_instance_count": 0,
                            "time": "string",
                        }],
                        "timezone": "string",
                    },
                },
                "password": "string",
                "ssh_keys": ["string"],
                "subnet_id": "string",
                "virtual_network_id": "string",
            },
            "zookeeper_node": {
                "username": "string",
                "vm_size": "string",
                "password": "string",
                "ssh_keys": ["string"],
                "subnet_id": "string",
                "virtual_network_id": "string",
            },
        },
        resource_group_name="string",
        network={
            "connection_direction": "string",
            "private_link_enabled": False,
        },
        name="string",
        monitor={
            "log_analytics_workspace_id": "string",
            "primary_key": "string",
        },
        metastores={
            "ambari": {
                "database_name": "string",
                "password": "string",
                "server": "string",
                "username": "string",
            },
            "hive": {
                "database_name": "string",
                "password": "string",
                "server": "string",
                "username": "string",
            },
            "oozie": {
                "database_name": "string",
                "password": "string",
                "server": "string",
                "username": "string",
            },
        },
        location="string",
        security_profile={
            "aadds_resource_id": "string",
            "domain_name": "string",
            "domain_user_password": "string",
            "domain_username": "string",
            "ldaps_urls": ["string"],
            "msi_resource_id": "string",
            "cluster_users_group_dns": ["string"],
        },
        storage_account_gen2={
            "filesystem_id": "string",
            "is_default": False,
            "managed_identity_resource_id": "string",
            "storage_resource_id": "string",
        },
        storage_accounts=[{
            "is_default": False,
            "storage_account_key": "string",
            "storage_container_id": "string",
            "storage_resource_id": "string",
        }],
        tags={
            "string": "string",
        },
        encryption_in_transit_enabled=False,
        tls_min_version="string")
    
    const sparkClusterResource = new azure.hdinsight.SparkCluster("sparkClusterResource", {
        clusterVersion: "string",
        componentVersion: {
            spark: "string",
        },
        tier: "string",
        gateway: {
            password: "string",
            username: "string",
        },
        roles: {
            headNode: {
                username: "string",
                vmSize: "string",
                password: "string",
                sshKeys: ["string"],
                subnetId: "string",
                virtualNetworkId: "string",
            },
            workerNode: {
                targetInstanceCount: 0,
                username: "string",
                vmSize: "string",
                autoscale: {
                    capacity: {
                        maxInstanceCount: 0,
                        minInstanceCount: 0,
                    },
                    recurrence: {
                        schedules: [{
                            days: ["string"],
                            targetInstanceCount: 0,
                            time: "string",
                        }],
                        timezone: "string",
                    },
                },
                password: "string",
                sshKeys: ["string"],
                subnetId: "string",
                virtualNetworkId: "string",
            },
            zookeeperNode: {
                username: "string",
                vmSize: "string",
                password: "string",
                sshKeys: ["string"],
                subnetId: "string",
                virtualNetworkId: "string",
            },
        },
        resourceGroupName: "string",
        network: {
            connectionDirection: "string",
            privateLinkEnabled: false,
        },
        name: "string",
        monitor: {
            logAnalyticsWorkspaceId: "string",
            primaryKey: "string",
        },
        metastores: {
            ambari: {
                databaseName: "string",
                password: "string",
                server: "string",
                username: "string",
            },
            hive: {
                databaseName: "string",
                password: "string",
                server: "string",
                username: "string",
            },
            oozie: {
                databaseName: "string",
                password: "string",
                server: "string",
                username: "string",
            },
        },
        location: "string",
        securityProfile: {
            aaddsResourceId: "string",
            domainName: "string",
            domainUserPassword: "string",
            domainUsername: "string",
            ldapsUrls: ["string"],
            msiResourceId: "string",
            clusterUsersGroupDns: ["string"],
        },
        storageAccountGen2: {
            filesystemId: "string",
            isDefault: false,
            managedIdentityResourceId: "string",
            storageResourceId: "string",
        },
        storageAccounts: [{
            isDefault: false,
            storageAccountKey: "string",
            storageContainerId: "string",
            storageResourceId: "string",
        }],
        tags: {
            string: "string",
        },
        encryptionInTransitEnabled: false,
        tlsMinVersion: "string",
    });
    
    type: azure:hdinsight:SparkCluster
    properties:
        clusterVersion: string
        componentVersion:
            spark: string
        encryptionInTransitEnabled: false
        gateway:
            password: string
            username: string
        location: string
        metastores:
            ambari:
                databaseName: string
                password: string
                server: string
                username: string
            hive:
                databaseName: string
                password: string
                server: string
                username: string
            oozie:
                databaseName: string
                password: string
                server: string
                username: string
        monitor:
            logAnalyticsWorkspaceId: string
            primaryKey: string
        name: string
        network:
            connectionDirection: string
            privateLinkEnabled: false
        resourceGroupName: string
        roles:
            headNode:
                password: string
                sshKeys:
                    - string
                subnetId: string
                username: string
                virtualNetworkId: string
                vmSize: string
            workerNode:
                autoscale:
                    capacity:
                        maxInstanceCount: 0
                        minInstanceCount: 0
                    recurrence:
                        schedules:
                            - days:
                                - string
                              targetInstanceCount: 0
                              time: string
                        timezone: string
                password: string
                sshKeys:
                    - string
                subnetId: string
                targetInstanceCount: 0
                username: string
                virtualNetworkId: string
                vmSize: string
            zookeeperNode:
                password: string
                sshKeys:
                    - string
                subnetId: string
                username: string
                virtualNetworkId: string
                vmSize: string
        securityProfile:
            aaddsResourceId: string
            clusterUsersGroupDns:
                - string
            domainName: string
            domainUserPassword: string
            domainUsername: string
            ldapsUrls:
                - string
            msiResourceId: string
        storageAccountGen2:
            filesystemId: string
            isDefault: false
            managedIdentityResourceId: string
            storageResourceId: string
        storageAccounts:
            - isDefault: false
              storageAccountKey: string
              storageContainerId: string
              storageResourceId: string
        tags:
            string: string
        tier: string
        tlsMinVersion: string
    

    SparkCluster Resource Properties

    To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

    Inputs

    In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.

    The SparkCluster resource accepts the following input properties:

    ClusterVersion string
    Specifies the Version of HDInsights which should be used for this Cluster. Changing this forces a new resource to be created.
    ComponentVersion SparkClusterComponentVersion
    A component_version block as defined below.
    Gateway SparkClusterGateway
    A gateway block as defined below.
    ResourceGroupName string
    Specifies the name of the Resource Group in which this HDInsight Spark Cluster should exist. Changing this forces a new resource to be created.
    Roles SparkClusterRoles
    A roles block as defined below.
    Tier string
    Specifies the Tier which should be used for this HDInsight Spark Cluster. Possible values are Standard or Premium. Changing this forces a new resource to be created.
    EncryptionInTransitEnabled bool
    Whether encryption in transit is enabled for this Cluster. Changing this forces a new resource to be created.
    Location string
    Specifies the Azure Region in which this HDInsight Spark Cluster should exist. Changing this forces a new resource to be created.
    Metastores SparkClusterMetastores
    A metastores block as defined below.
    Monitor SparkClusterMonitor
    A monitor block as defined below.
    Name string
    Specifies the name for this HDInsight Spark Cluster. Changing this forces a new resource to be created.
    Network SparkClusterNetwork
    A network block as defined below.
    SecurityProfile SparkClusterSecurityProfile
    A security_profile block as defined below.
    StorageAccountGen2 SparkClusterStorageAccountGen2
    A storage_account_gen2 block as defined below.
    StorageAccounts List<SparkClusterStorageAccount>
    One or more storage_account blocks as defined below.
    Tags Dictionary<string, string>
    A map of Tags which should be assigned to this HDInsight Spark Cluster.
    TlsMinVersion string
    ClusterVersion string
    Specifies the Version of HDInsights which should be used for this Cluster. Changing this forces a new resource to be created.
    ComponentVersion SparkClusterComponentVersionArgs
    A component_version block as defined below.
    Gateway SparkClusterGatewayArgs
    A gateway block as defined below.
    ResourceGroupName string
    Specifies the name of the Resource Group in which this HDInsight Spark Cluster should exist. Changing this forces a new resource to be created.
    Roles SparkClusterRolesArgs
    A roles block as defined below.
    Tier string
    Specifies the Tier which should be used for this HDInsight Spark Cluster. Possible values are Standard or Premium. Changing this forces a new resource to be created.
    EncryptionInTransitEnabled bool
    Whether encryption in transit is enabled for this Cluster. Changing this forces a new resource to be created.
    Location string
    Specifies the Azure Region in which this HDInsight Spark Cluster should exist. Changing this forces a new resource to be created.
    Metastores SparkClusterMetastoresArgs
    A metastores block as defined below.
    Monitor SparkClusterMonitorArgs
    A monitor block as defined below.
    Name string
    Specifies the name for this HDInsight Spark Cluster. Changing this forces a new resource to be created.
    Network SparkClusterNetworkArgs
    A network block as defined below.
    SecurityProfile SparkClusterSecurityProfileArgs
    A security_profile block as defined below.
    StorageAccountGen2 SparkClusterStorageAccountGen2Args
    A storage_account_gen2 block as defined below.
    StorageAccounts []SparkClusterStorageAccountArgs
    One or more storage_account blocks as defined below.
    Tags map[string]string
    A map of Tags which should be assigned to this HDInsight Spark Cluster.
    TlsMinVersion string
    clusterVersion String
    Specifies the Version of HDInsights which should be used for this Cluster. Changing this forces a new resource to be created.
    componentVersion SparkClusterComponentVersion
    A component_version block as defined below.
    gateway SparkClusterGateway
    A gateway block as defined below.
    resourceGroupName String
    Specifies the name of the Resource Group in which this HDInsight Spark Cluster should exist. Changing this forces a new resource to be created.
    roles SparkClusterRoles
    A roles block as defined below.
    tier String
    Specifies the Tier which should be used for this HDInsight Spark Cluster. Possible values are Standard or Premium. Changing this forces a new resource to be created.
    encryptionInTransitEnabled Boolean
    Whether encryption in transit is enabled for this Cluster. Changing this forces a new resource to be created.
    location String
    Specifies the Azure Region in which this HDInsight Spark Cluster should exist. Changing this forces a new resource to be created.
    metastores SparkClusterMetastores
    A metastores block as defined below.
    monitor SparkClusterMonitor
    A monitor block as defined below.
    name String
    Specifies the name for this HDInsight Spark Cluster. Changing this forces a new resource to be created.
    network SparkClusterNetwork
    A network block as defined below.
    securityProfile SparkClusterSecurityProfile
    A security_profile block as defined below.
    storageAccountGen2 SparkClusterStorageAccountGen2
    A storage_account_gen2 block as defined below.
    storageAccounts List<SparkClusterStorageAccount>
    One or more storage_account blocks as defined below.
    tags Map<String,String>
    A map of Tags which should be assigned to this HDInsight Spark Cluster.
    tlsMinVersion String
    clusterVersion string
    Specifies the Version of HDInsights which should be used for this Cluster. Changing this forces a new resource to be created.
    componentVersion SparkClusterComponentVersion
    A component_version block as defined below.
    gateway SparkClusterGateway
    A gateway block as defined below.
    resourceGroupName string
    Specifies the name of the Resource Group in which this HDInsight Spark Cluster should exist. Changing this forces a new resource to be created.
    roles SparkClusterRoles
    A roles block as defined below.
    tier string
    Specifies the Tier which should be used for this HDInsight Spark Cluster. Possible values are Standard or Premium. Changing this forces a new resource to be created.
    encryptionInTransitEnabled boolean
    Whether encryption in transit is enabled for this Cluster. Changing this forces a new resource to be created.
    location string
    Specifies the Azure Region in which this HDInsight Spark Cluster should exist. Changing this forces a new resource to be created.
    metastores SparkClusterMetastores
    A metastores block as defined below.
    monitor SparkClusterMonitor
    A monitor block as defined below.
    name string
    Specifies the name for this HDInsight Spark Cluster. Changing this forces a new resource to be created.
    network SparkClusterNetwork
    A network block as defined below.
    securityProfile SparkClusterSecurityProfile
    A security_profile block as defined below.
    storageAccountGen2 SparkClusterStorageAccountGen2
    A storage_account_gen2 block as defined below.
    storageAccounts SparkClusterStorageAccount[]
    One or more storage_account blocks as defined below.
    tags {[key: string]: string}
    A map of Tags which should be assigned to this HDInsight Spark Cluster.
    tlsMinVersion string
    cluster_version str
    Specifies the Version of HDInsights which should be used for this Cluster. Changing this forces a new resource to be created.
    component_version SparkClusterComponentVersionArgs
    A component_version block as defined below.
    gateway SparkClusterGatewayArgs
    A gateway block as defined below.
    resource_group_name str
    Specifies the name of the Resource Group in which this HDInsight Spark Cluster should exist. Changing this forces a new resource to be created.
    roles SparkClusterRolesArgs
    A roles block as defined below.
    tier str
    Specifies the Tier which should be used for this HDInsight Spark Cluster. Possible values are Standard or Premium. Changing this forces a new resource to be created.
    encryption_in_transit_enabled bool
    Whether encryption in transit is enabled for this Cluster. Changing this forces a new resource to be created.
    location str
    Specifies the Azure Region in which this HDInsight Spark Cluster should exist. Changing this forces a new resource to be created.
    metastores SparkClusterMetastoresArgs
    A metastores block as defined below.
    monitor SparkClusterMonitorArgs
    A monitor block as defined below.
    name str
    Specifies the name for this HDInsight Spark Cluster. Changing this forces a new resource to be created.
    network SparkClusterNetworkArgs
    A network block as defined below.
    security_profile SparkClusterSecurityProfileArgs
    A security_profile block as defined below.
    storage_account_gen2 SparkClusterStorageAccountGen2Args
    A storage_account_gen2 block as defined below.
    storage_accounts Sequence[SparkClusterStorageAccountArgs]
    One or more storage_account blocks as defined below.
    tags Mapping[str, str]
    A map of Tags which should be assigned to this HDInsight Spark Cluster.
    tls_min_version str
    clusterVersion String
    Specifies the Version of HDInsights which should be used for this Cluster. Changing this forces a new resource to be created.
    componentVersion Property Map
    A component_version block as defined below.
    gateway Property Map
    A gateway block as defined below.
    resourceGroupName String
    Specifies the name of the Resource Group in which this HDInsight Spark Cluster should exist. Changing this forces a new resource to be created.
    roles Property Map
    A roles block as defined below.
    tier String
    Specifies the Tier which should be used for this HDInsight Spark Cluster. Possible values are Standard or Premium. Changing this forces a new resource to be created.
    encryptionInTransitEnabled Boolean
    Whether encryption in transit is enabled for this Cluster. Changing this forces a new resource to be created.
    location String
    Specifies the Azure Region in which this HDInsight Spark Cluster should exist. Changing this forces a new resource to be created.
    metastores Property Map
    A metastores block as defined below.
    monitor Property Map
    A monitor block as defined below.
    name String
    Specifies the name for this HDInsight Spark Cluster. Changing this forces a new resource to be created.
    network Property Map
    A network block as defined below.
    securityProfile Property Map
    A security_profile block as defined below.
    storageAccountGen2 Property Map
    A storage_account_gen2 block as defined below.
    storageAccounts List<Property Map>
    One or more storage_account blocks as defined below.
    tags Map<String>
    A map of Tags which should be assigned to this HDInsight Spark Cluster.
    tlsMinVersion String

    Outputs

    All input properties are implicitly available as output properties. Additionally, the SparkCluster resource produces the following output properties:

    HttpsEndpoint string
    The HTTPS Connectivity Endpoint for this HDInsight Spark Cluster.
    Id string
    The provider-assigned unique ID for this managed resource.
    SshEndpoint string
    The SSH Connectivity Endpoint for this HDInsight Spark Cluster.
    HttpsEndpoint string
    The HTTPS Connectivity Endpoint for this HDInsight Spark Cluster.
    Id string
    The provider-assigned unique ID for this managed resource.
    SshEndpoint string
    The SSH Connectivity Endpoint for this HDInsight Spark Cluster.
    httpsEndpoint String
    The HTTPS Connectivity Endpoint for this HDInsight Spark Cluster.
    id String
    The provider-assigned unique ID for this managed resource.
    sshEndpoint String
    The SSH Connectivity Endpoint for this HDInsight Spark Cluster.
    httpsEndpoint string
    The HTTPS Connectivity Endpoint for this HDInsight Spark Cluster.
    id string
    The provider-assigned unique ID for this managed resource.
    sshEndpoint string
    The SSH Connectivity Endpoint for this HDInsight Spark Cluster.
    https_endpoint str
    The HTTPS Connectivity Endpoint for this HDInsight Spark Cluster.
    id str
    The provider-assigned unique ID for this managed resource.
    ssh_endpoint str
    The SSH Connectivity Endpoint for this HDInsight Spark Cluster.
    httpsEndpoint String
    The HTTPS Connectivity Endpoint for this HDInsight Spark Cluster.
    id String
    The provider-assigned unique ID for this managed resource.
    sshEndpoint String
    The SSH Connectivity Endpoint for this HDInsight Spark Cluster.

    Look up Existing SparkCluster Resource

    Get an existing SparkCluster resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

    public static get(name: string, id: Input<ID>, state?: SparkClusterState, opts?: CustomResourceOptions): SparkCluster
    @staticmethod
    def get(resource_name: str,
            id: str,
            opts: Optional[ResourceOptions] = None,
            cluster_version: Optional[str] = None,
            component_version: Optional[SparkClusterComponentVersionArgs] = None,
            encryption_in_transit_enabled: Optional[bool] = None,
            gateway: Optional[SparkClusterGatewayArgs] = None,
            https_endpoint: Optional[str] = None,
            location: Optional[str] = None,
            metastores: Optional[SparkClusterMetastoresArgs] = None,
            monitor: Optional[SparkClusterMonitorArgs] = None,
            name: Optional[str] = None,
            network: Optional[SparkClusterNetworkArgs] = None,
            resource_group_name: Optional[str] = None,
            roles: Optional[SparkClusterRolesArgs] = None,
            security_profile: Optional[SparkClusterSecurityProfileArgs] = None,
            ssh_endpoint: Optional[str] = None,
            storage_account_gen2: Optional[SparkClusterStorageAccountGen2Args] = None,
            storage_accounts: Optional[Sequence[SparkClusterStorageAccountArgs]] = None,
            tags: Optional[Mapping[str, str]] = None,
            tier: Optional[str] = None,
            tls_min_version: Optional[str] = None) -> SparkCluster
    func GetSparkCluster(ctx *Context, name string, id IDInput, state *SparkClusterState, opts ...ResourceOption) (*SparkCluster, error)
    public static SparkCluster Get(string name, Input<string> id, SparkClusterState? state, CustomResourceOptions? opts = null)
    public static SparkCluster get(String name, Output<String> id, SparkClusterState state, CustomResourceOptions options)
    resources:
      _:
        type: azure:hdinsight:SparkCluster
        get:
          id: ${id}
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    resource_name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    The following state arguments are supported:
    ClusterVersion string
    Specifies the Version of HDInsights which should be used for this Cluster. Changing this forces a new resource to be created.
    ComponentVersion SparkClusterComponentVersion
    A component_version block as defined below.
    EncryptionInTransitEnabled bool
    Whether encryption in transit is enabled for this Cluster. Changing this forces a new resource to be created.
    Gateway SparkClusterGateway
    A gateway block as defined below.
    HttpsEndpoint string
    The HTTPS Connectivity Endpoint for this HDInsight Spark Cluster.
    Location string
    Specifies the Azure Region in which this HDInsight Spark Cluster should exist. Changing this forces a new resource to be created.
    Metastores SparkClusterMetastores
    A metastores block as defined below.
    Monitor SparkClusterMonitor
    A monitor block as defined below.
    Name string
    Specifies the name for this HDInsight Spark Cluster. Changing this forces a new resource to be created.
    Network SparkClusterNetwork
    A network block as defined below.
    ResourceGroupName string
    Specifies the name of the Resource Group in which this HDInsight Spark Cluster should exist. Changing this forces a new resource to be created.
    Roles SparkClusterRoles
    A roles block as defined below.
    SecurityProfile SparkClusterSecurityProfile
    A security_profile block as defined below.
    SshEndpoint string
    The SSH Connectivity Endpoint for this HDInsight Spark Cluster.
    StorageAccountGen2 SparkClusterStorageAccountGen2
    A storage_account_gen2 block as defined below.
    StorageAccounts List<SparkClusterStorageAccount>
    One or more storage_account blocks as defined below.
    Tags Dictionary<string, string>
    A map of Tags which should be assigned to this HDInsight Spark Cluster.
    Tier string
    Specifies the Tier which should be used for this HDInsight Spark Cluster. Possible values are Standard or Premium. Changing this forces a new resource to be created.
    TlsMinVersion string
    ClusterVersion string
    Specifies the Version of HDInsights which should be used for this Cluster. Changing this forces a new resource to be created.
    ComponentVersion SparkClusterComponentVersionArgs
    A component_version block as defined below.
    EncryptionInTransitEnabled bool
    Whether encryption in transit is enabled for this Cluster. Changing this forces a new resource to be created.
    Gateway SparkClusterGatewayArgs
    A gateway block as defined below.
    HttpsEndpoint string
    The HTTPS Connectivity Endpoint for this HDInsight Spark Cluster.
    Location string
    Specifies the Azure Region in which this HDInsight Spark Cluster should exist. Changing this forces a new resource to be created.
    Metastores SparkClusterMetastoresArgs
    A metastores block as defined below.
    Monitor SparkClusterMonitorArgs
    A monitor block as defined below.
    Name string
    Specifies the name for this HDInsight Spark Cluster. Changing this forces a new resource to be created.
    Network SparkClusterNetworkArgs
    A network block as defined below.
    ResourceGroupName string
    Specifies the name of the Resource Group in which this HDInsight Spark Cluster should exist. Changing this forces a new resource to be created.
    Roles SparkClusterRolesArgs
    A roles block as defined below.
    SecurityProfile SparkClusterSecurityProfileArgs
    A security_profile block as defined below.
    SshEndpoint string
    The SSH Connectivity Endpoint for this HDInsight Spark Cluster.
    StorageAccountGen2 SparkClusterStorageAccountGen2Args
    A storage_account_gen2 block as defined below.
    StorageAccounts []SparkClusterStorageAccountArgs
    One or more storage_account blocks as defined below.
    Tags map[string]string
    A map of Tags which should be assigned to this HDInsight Spark Cluster.
    Tier string
    Specifies the Tier which should be used for this HDInsight Spark Cluster. Possible values are Standard or Premium. Changing this forces a new resource to be created.
    TlsMinVersion string
    clusterVersion String
    Specifies the Version of HDInsights which should be used for this Cluster. Changing this forces a new resource to be created.
    componentVersion SparkClusterComponentVersion
    A component_version block as defined below.
    encryptionInTransitEnabled Boolean
    Whether encryption in transit is enabled for this Cluster. Changing this forces a new resource to be created.
    gateway SparkClusterGateway
    A gateway block as defined below.
    httpsEndpoint String
    The HTTPS Connectivity Endpoint for this HDInsight Spark Cluster.
    location String
    Specifies the Azure Region in which this HDInsight Spark Cluster should exist. Changing this forces a new resource to be created.
    metastores SparkClusterMetastores
    A metastores block as defined below.
    monitor SparkClusterMonitor
    A monitor block as defined below.
    name String
    Specifies the name for this HDInsight Spark Cluster. Changing this forces a new resource to be created.
    network SparkClusterNetwork
    A network block as defined below.
    resourceGroupName String
    Specifies the name of the Resource Group in which this HDInsight Spark Cluster should exist. Changing this forces a new resource to be created.
    roles SparkClusterRoles
    A roles block as defined below.
    securityProfile SparkClusterSecurityProfile
    A security_profile block as defined below.
    sshEndpoint String
    The SSH Connectivity Endpoint for this HDInsight Spark Cluster.
    storageAccountGen2 SparkClusterStorageAccountGen2
    A storage_account_gen2 block as defined below.
    storageAccounts List<SparkClusterStorageAccount>
    One or more storage_account blocks as defined below.
    tags Map<String,String>
    A map of Tags which should be assigned to this HDInsight Spark Cluster.
    tier String
    Specifies the Tier which should be used for this HDInsight Spark Cluster. Possible values are Standard or Premium. Changing this forces a new resource to be created.
    tlsMinVersion String
    clusterVersion string
    Specifies the Version of HDInsights which should be used for this Cluster. Changing this forces a new resource to be created.
    componentVersion SparkClusterComponentVersion
    A component_version block as defined below.
    encryptionInTransitEnabled boolean
    Whether encryption in transit is enabled for this Cluster. Changing this forces a new resource to be created.
    gateway SparkClusterGateway
    A gateway block as defined below.
    httpsEndpoint string
    The HTTPS Connectivity Endpoint for this HDInsight Spark Cluster.
    location string
    Specifies the Azure Region in which this HDInsight Spark Cluster should exist. Changing this forces a new resource to be created.
    metastores SparkClusterMetastores
    A metastores block as defined below.
    monitor SparkClusterMonitor
    A monitor block as defined below.
    name string
    Specifies the name for this HDInsight Spark Cluster. Changing this forces a new resource to be created.
    network SparkClusterNetwork
    A network block as defined below.
    resourceGroupName string
    Specifies the name of the Resource Group in which this HDInsight Spark Cluster should exist. Changing this forces a new resource to be created.
    roles SparkClusterRoles
    A roles block as defined below.
    securityProfile SparkClusterSecurityProfile
    A security_profile block as defined below.
    sshEndpoint string
    The SSH Connectivity Endpoint for this HDInsight Spark Cluster.
    storageAccountGen2 SparkClusterStorageAccountGen2
    A storage_account_gen2 block as defined below.
    storageAccounts SparkClusterStorageAccount[]
    One or more storage_account blocks as defined below.
    tags {[key: string]: string}
    A map of Tags which should be assigned to this HDInsight Spark Cluster.
    tier string
    Specifies the Tier which should be used for this HDInsight Spark Cluster. Possible values are Standard or Premium. Changing this forces a new resource to be created.
    tlsMinVersion string
    cluster_version str
    Specifies the Version of HDInsights which should be used for this Cluster. Changing this forces a new resource to be created.
    component_version SparkClusterComponentVersionArgs
    A component_version block as defined below.
    encryption_in_transit_enabled bool
    Whether encryption in transit is enabled for this Cluster. Changing this forces a new resource to be created.
    gateway SparkClusterGatewayArgs
    A gateway block as defined below.
    https_endpoint str
    The HTTPS Connectivity Endpoint for this HDInsight Spark Cluster.
    location str
    Specifies the Azure Region in which this HDInsight Spark Cluster should exist. Changing this forces a new resource to be created.
    metastores SparkClusterMetastoresArgs
    A metastores block as defined below.
    monitor SparkClusterMonitorArgs
    A monitor block as defined below.
    name str
    Specifies the name for this HDInsight Spark Cluster. Changing this forces a new resource to be created.
    network SparkClusterNetworkArgs
    A network block as defined below.
    resource_group_name str
    Specifies the name of the Resource Group in which this HDInsight Spark Cluster should exist. Changing this forces a new resource to be created.
    roles SparkClusterRolesArgs
    A roles block as defined below.
    security_profile SparkClusterSecurityProfileArgs
    A security_profile block as defined below.
    ssh_endpoint str
    The SSH Connectivity Endpoint for this HDInsight Spark Cluster.
    storage_account_gen2 SparkClusterStorageAccountGen2Args
    A storage_account_gen2 block as defined below.
    storage_accounts Sequence[SparkClusterStorageAccountArgs]
    One or more storage_account blocks as defined below.
    tags Mapping[str, str]
    A map of Tags which should be assigned to this HDInsight Spark Cluster.
    tier str
    Specifies the Tier which should be used for this HDInsight Spark Cluster. Possible values are Standard or Premium. Changing this forces a new resource to be created.
    tls_min_version str
    The minimal supported TLS version. Changing this forces a new resource to be created.
    clusterVersion String
    Specifies the Version of HDInsight which should be used for this Cluster. Changing this forces a new resource to be created.
    componentVersion Property Map
    A component_version block as defined below.
    encryptionInTransitEnabled Boolean
    Whether encryption in transit is enabled for this Cluster. Changing this forces a new resource to be created.
    gateway Property Map
    A gateway block as defined below.
    httpsEndpoint String
    The HTTPS Connectivity Endpoint for this HDInsight Spark Cluster.
    location String
    Specifies the Azure Region in which this HDInsight Spark Cluster should exist. Changing this forces a new resource to be created.
    metastores Property Map
    A metastores block as defined below.
    monitor Property Map
    A monitor block as defined below.
    name String
    Specifies the name for this HDInsight Spark Cluster. Changing this forces a new resource to be created.
    network Property Map
    A network block as defined below.
    resourceGroupName String
    Specifies the name of the Resource Group in which this HDInsight Spark Cluster should exist. Changing this forces a new resource to be created.
    roles Property Map
    A roles block as defined below.
    securityProfile Property Map
    A security_profile block as defined below.
    sshEndpoint String
    The SSH Connectivity Endpoint for this HDInsight Spark Cluster.
    storageAccountGen2 Property Map
    A storage_account_gen2 block as defined below.
    storageAccounts List<Property Map>
    One or more storage_account blocks as defined below.
    tags Map<String>
    A map of Tags which should be assigned to this HDInsight Spark Cluster.
    tier String
    Specifies the Tier which should be used for this HDInsight Spark Cluster. Possible values are Standard or Premium. Changing this forces a new resource to be created.
    tlsMinVersion String
    The minimal supported TLS version. Changing this forces a new resource to be created.

    Supporting Types

    SparkClusterComponentVersion, SparkClusterComponentVersionArgs

    Spark string
    The version of Spark which should be used for this HDInsight Spark Cluster. Changing this forces a new resource to be created.
    Spark string
    The version of Spark which should be used for this HDInsight Spark Cluster. Changing this forces a new resource to be created.
    spark String
    The version of Spark which should be used for this HDInsight Spark Cluster. Changing this forces a new resource to be created.
    spark string
    The version of Spark which should be used for this HDInsight Spark Cluster. Changing this forces a new resource to be created.
    spark str
    The version of Spark which should be used for this HDInsight Spark Cluster. Changing this forces a new resource to be created.
    spark String
    The version of Spark which should be used for this HDInsight Spark Cluster. Changing this forces a new resource to be created.

    SparkClusterGateway, SparkClusterGatewayArgs

    Password string
    The password used for the Ambari Portal.
    Username string
    The username used for the Ambari Portal. Changing this forces a new resource to be created.
    Enabled bool
    Is the Ambari portal enabled? The HDInsight API doesn't support disabling gateway anymore.

    Deprecated: HDInsight doesn't support disabling gateway anymore

    Password string
    The password used for the Ambari Portal.
    Username string
    The username used for the Ambari Portal. Changing this forces a new resource to be created.
    Enabled bool
    Is the Ambari portal enabled? The HDInsight API doesn't support disabling gateway anymore.

    Deprecated: HDInsight doesn't support disabling gateway anymore

    password String
    The password used for the Ambari Portal.
    username String
    The username used for the Ambari Portal. Changing this forces a new resource to be created.
    enabled Boolean
    Is the Ambari portal enabled? The HDInsight API doesn't support disabling gateway anymore.

    Deprecated: HDInsight doesn't support disabling gateway anymore

    password string
    The password used for the Ambari Portal.
    username string
    The username used for the Ambari Portal. Changing this forces a new resource to be created.
    enabled boolean
    Is the Ambari portal enabled? The HDInsight API doesn't support disabling gateway anymore.

    Deprecated: HDInsight doesn't support disabling gateway anymore

    password str
    The password used for the Ambari Portal.
    username str
    The username used for the Ambari Portal. Changing this forces a new resource to be created.
    enabled bool
    Is the Ambari portal enabled? The HDInsight API doesn't support disabling gateway anymore.

    Deprecated: HDInsight doesn't support disabling gateway anymore

    password String
    The password used for the Ambari Portal.
    username String
    The username used for the Ambari Portal. Changing this forces a new resource to be created.
    enabled Boolean
    Is the Ambari portal enabled? The HDInsight API doesn't support disabling gateway anymore.

    Deprecated: HDInsight doesn't support disabling gateway anymore

    SparkClusterMetastores, SparkClusterMetastoresArgs

    Ambari SparkClusterMetastoresAmbari
    An ambari block as defined below.
    Hive SparkClusterMetastoresHive
    A hive block as defined below.
    Oozie SparkClusterMetastoresOozie
    An oozie block as defined below.
    Ambari SparkClusterMetastoresAmbari
    An ambari block as defined below.
    Hive SparkClusterMetastoresHive
    A hive block as defined below.
    Oozie SparkClusterMetastoresOozie
    An oozie block as defined below.
    ambari SparkClusterMetastoresAmbari
    An ambari block as defined below.
    hive SparkClusterMetastoresHive
    A hive block as defined below.
    oozie SparkClusterMetastoresOozie
    An oozie block as defined below.
    ambari SparkClusterMetastoresAmbari
    An ambari block as defined below.
    hive SparkClusterMetastoresHive
    A hive block as defined below.
    oozie SparkClusterMetastoresOozie
    An oozie block as defined below.
    ambari SparkClusterMetastoresAmbari
    An ambari block as defined below.
    hive SparkClusterMetastoresHive
    A hive block as defined below.
    oozie SparkClusterMetastoresOozie
    An oozie block as defined below.
    ambari Property Map
    An ambari block as defined below.
    hive Property Map
    A hive block as defined below.
    oozie Property Map
    An oozie block as defined below.

    SparkClusterMetastoresAmbari, SparkClusterMetastoresAmbariArgs

    DatabaseName string
    The external Ambari metastore's existing SQL database. Changing this forces a new resource to be created.
    Password string
    The external Ambari metastore's existing SQL server admin password. Changing this forces a new resource to be created.
    Server string
    The fully-qualified domain name (FQDN) of the SQL server to use for the external Ambari metastore. Changing this forces a new resource to be created.
    Username string
    The external Ambari metastore's existing SQL server admin username. Changing this forces a new resource to be created.
    DatabaseName string
    The external Ambari metastore's existing SQL database. Changing this forces a new resource to be created.
    Password string
    The external Ambari metastore's existing SQL server admin password. Changing this forces a new resource to be created.
    Server string
    The fully-qualified domain name (FQDN) of the SQL server to use for the external Ambari metastore. Changing this forces a new resource to be created.
    Username string
    The external Ambari metastore's existing SQL server admin username. Changing this forces a new resource to be created.
    databaseName String
    The external Ambari metastore's existing SQL database. Changing this forces a new resource to be created.
    password String
    The external Ambari metastore's existing SQL server admin password. Changing this forces a new resource to be created.
    server String
    The fully-qualified domain name (FQDN) of the SQL server to use for the external Ambari metastore. Changing this forces a new resource to be created.
    username String
    The external Ambari metastore's existing SQL server admin username. Changing this forces a new resource to be created.
    databaseName string
    The external Ambari metastore's existing SQL database. Changing this forces a new resource to be created.
    password string
    The external Ambari metastore's existing SQL server admin password. Changing this forces a new resource to be created.
    server string
    The fully-qualified domain name (FQDN) of the SQL server to use for the external Ambari metastore. Changing this forces a new resource to be created.
    username string
    The external Ambari metastore's existing SQL server admin username. Changing this forces a new resource to be created.
    database_name str
    The external Ambari metastore's existing SQL database. Changing this forces a new resource to be created.
    password str
    The external Ambari metastore's existing SQL server admin password. Changing this forces a new resource to be created.
    server str
    The fully-qualified domain name (FQDN) of the SQL server to use for the external Ambari metastore. Changing this forces a new resource to be created.
    username str
    The external Ambari metastore's existing SQL server admin username. Changing this forces a new resource to be created.
    databaseName String
    The external Ambari metastore's existing SQL database. Changing this forces a new resource to be created.
    password String
    The external Ambari metastore's existing SQL server admin password. Changing this forces a new resource to be created.
    server String
    The fully-qualified domain name (FQDN) of the SQL server to use for the external Ambari metastore. Changing this forces a new resource to be created.
    username String
    The external Ambari metastore's existing SQL server admin username. Changing this forces a new resource to be created.

    SparkClusterMetastoresHive, SparkClusterMetastoresHiveArgs

    DatabaseName string
    The external Hive metastore's existing SQL database. Changing this forces a new resource to be created.
    Password string
    The external Hive metastore's existing SQL server admin password. Changing this forces a new resource to be created.
    Server string
    The fully-qualified domain name (FQDN) of the SQL server to use for the external Hive metastore. Changing this forces a new resource to be created.
    Username string
    The external Hive metastore's existing SQL server admin username. Changing this forces a new resource to be created.
    DatabaseName string
    The external Hive metastore's existing SQL database. Changing this forces a new resource to be created.
    Password string
    The external Hive metastore's existing SQL server admin password. Changing this forces a new resource to be created.
    Server string
    The fully-qualified domain name (FQDN) of the SQL server to use for the external Hive metastore. Changing this forces a new resource to be created.
    Username string
    The external Hive metastore's existing SQL server admin username. Changing this forces a new resource to be created.
    databaseName String
    The external Hive metastore's existing SQL database. Changing this forces a new resource to be created.
    password String
    The external Hive metastore's existing SQL server admin password. Changing this forces a new resource to be created.
    server String
    The fully-qualified domain name (FQDN) of the SQL server to use for the external Hive metastore. Changing this forces a new resource to be created.
    username String
    The external Hive metastore's existing SQL server admin username. Changing this forces a new resource to be created.
    databaseName string
    The external Hive metastore's existing SQL database. Changing this forces a new resource to be created.
    password string
    The external Hive metastore's existing SQL server admin password. Changing this forces a new resource to be created.
    server string
    The fully-qualified domain name (FQDN) of the SQL server to use for the external Hive metastore. Changing this forces a new resource to be created.
    username string
    The external Hive metastore's existing SQL server admin username. Changing this forces a new resource to be created.
    database_name str
    The external Hive metastore's existing SQL database. Changing this forces a new resource to be created.
    password str
    The external Hive metastore's existing SQL server admin password. Changing this forces a new resource to be created.
    server str
    The fully-qualified domain name (FQDN) of the SQL server to use for the external Hive metastore. Changing this forces a new resource to be created.
    username str
    The external Hive metastore's existing SQL server admin username. Changing this forces a new resource to be created.
    databaseName String
    The external Hive metastore's existing SQL database. Changing this forces a new resource to be created.
    password String
    The external Hive metastore's existing SQL server admin password. Changing this forces a new resource to be created.
    server String
    The fully-qualified domain name (FQDN) of the SQL server to use for the external Hive metastore. Changing this forces a new resource to be created.
    username String
    The external Hive metastore's existing SQL server admin username. Changing this forces a new resource to be created.

    SparkClusterMetastoresOozie, SparkClusterMetastoresOozieArgs

    DatabaseName string
    The external Oozie metastore's existing SQL database. Changing this forces a new resource to be created.
    Password string
    The external Oozie metastore's existing SQL server admin password. Changing this forces a new resource to be created.
    Server string
    The fully-qualified domain name (FQDN) of the SQL server to use for the external Oozie metastore. Changing this forces a new resource to be created.
    Username string
    The external Oozie metastore's existing SQL server admin username. Changing this forces a new resource to be created.
    DatabaseName string
    The external Oozie metastore's existing SQL database. Changing this forces a new resource to be created.
    Password string
    The external Oozie metastore's existing SQL server admin password. Changing this forces a new resource to be created.
    Server string
    The fully-qualified domain name (FQDN) of the SQL server to use for the external Oozie metastore. Changing this forces a new resource to be created.
    Username string
    The external Oozie metastore's existing SQL server admin username. Changing this forces a new resource to be created.
    databaseName String
    The external Oozie metastore's existing SQL database. Changing this forces a new resource to be created.
    password String
    The external Oozie metastore's existing SQL server admin password. Changing this forces a new resource to be created.
    server String
    The fully-qualified domain name (FQDN) of the SQL server to use for the external Oozie metastore. Changing this forces a new resource to be created.
    username String
    The external Oozie metastore's existing SQL server admin username. Changing this forces a new resource to be created.
    databaseName string
    The external Oozie metastore's existing SQL database. Changing this forces a new resource to be created.
    password string
    The external Oozie metastore's existing SQL server admin password. Changing this forces a new resource to be created.
    server string
    The fully-qualified domain name (FQDN) of the SQL server to use for the external Oozie metastore. Changing this forces a new resource to be created.
    username string
    The external Oozie metastore's existing SQL server admin username. Changing this forces a new resource to be created.
    database_name str
    The external Oozie metastore's existing SQL database. Changing this forces a new resource to be created.
    password str
    The external Oozie metastore's existing SQL server admin password. Changing this forces a new resource to be created.
    server str
    The fully-qualified domain name (FQDN) of the SQL server to use for the external Oozie metastore. Changing this forces a new resource to be created.
    username str
    The external Oozie metastore's existing SQL server admin username. Changing this forces a new resource to be created.
    databaseName String
    The external Oozie metastore's existing SQL database. Changing this forces a new resource to be created.
    password String
    The external Oozie metastore's existing SQL server admin password. Changing this forces a new resource to be created.
    server String
    The fully-qualified domain name (FQDN) of the SQL server to use for the external Oozie metastore. Changing this forces a new resource to be created.
    username String
    The external Oozie metastore's existing SQL server admin username. Changing this forces a new resource to be created.

    SparkClusterMonitor, SparkClusterMonitorArgs

    LogAnalyticsWorkspaceId string
    The Operations Management Suite (OMS) workspace ID.
    PrimaryKey string
    The Operations Management Suite (OMS) workspace key.
    LogAnalyticsWorkspaceId string
    The Operations Management Suite (OMS) workspace ID.
    PrimaryKey string
    The Operations Management Suite (OMS) workspace key.
    logAnalyticsWorkspaceId String
    The Operations Management Suite (OMS) workspace ID.
    primaryKey String
    The Operations Management Suite (OMS) workspace key.
    logAnalyticsWorkspaceId string
    The Operations Management Suite (OMS) workspace ID.
    primaryKey string
    The Operations Management Suite (OMS) workspace key.
    log_analytics_workspace_id str
    The Operations Management Suite (OMS) workspace ID.
    primary_key str
    The Operations Management Suite (OMS) workspace key.
    logAnalyticsWorkspaceId String
    The Operations Management Suite (OMS) workspace ID.
    primaryKey String
    The Operations Management Suite (OMS) workspace key.

    SparkClusterNetwork, SparkClusterNetworkArgs

    ConnectionDirection string
    The direction of the resource provider connection. Possible values include Inbound or Outbound. Defaults to Inbound. Changing this forces a new resource to be created.
    PrivateLinkEnabled bool
    Is the private link enabled? Possible values include True or False. Defaults to False. Changing this forces a new resource to be created.
    ConnectionDirection string
    The direction of the resource provider connection. Possible values include Inbound or Outbound. Defaults to Inbound. Changing this forces a new resource to be created.
    PrivateLinkEnabled bool
    Is the private link enabled? Possible values include True or False. Defaults to False. Changing this forces a new resource to be created.
    connectionDirection String
    The direction of the resource provider connection. Possible values include Inbound or Outbound. Defaults to Inbound. Changing this forces a new resource to be created.
    privateLinkEnabled Boolean
    Is the private link enabled? Possible values include True or False. Defaults to False. Changing this forces a new resource to be created.
    connectionDirection string
    The direction of the resource provider connection. Possible values include Inbound or Outbound. Defaults to Inbound. Changing this forces a new resource to be created.
    privateLinkEnabled boolean
    Is the private link enabled? Possible values include True or False. Defaults to False. Changing this forces a new resource to be created.
    connection_direction str
    The direction of the resource provider connection. Possible values include Inbound or Outbound. Defaults to Inbound. Changing this forces a new resource to be created.
    private_link_enabled bool
    Is the private link enabled? Possible values include True or False. Defaults to False. Changing this forces a new resource to be created.
    connectionDirection String
    The direction of the resource provider connection. Possible values include Inbound or Outbound. Defaults to Inbound. Changing this forces a new resource to be created.
    privateLinkEnabled Boolean
    Is the private link enabled? Possible values include True or False. Defaults to False. Changing this forces a new resource to be created.

    SparkClusterRoles, SparkClusterRolesArgs

    HeadNode SparkClusterRolesHeadNode
    A head_node block as defined above.
    WorkerNode SparkClusterRolesWorkerNode
    A worker_node block as defined below.
    ZookeeperNode SparkClusterRolesZookeeperNode
    A zookeeper_node block as defined below.
    HeadNode SparkClusterRolesHeadNode
    A head_node block as defined above.
    WorkerNode SparkClusterRolesWorkerNode
    A worker_node block as defined below.
    ZookeeperNode SparkClusterRolesZookeeperNode
    A zookeeper_node block as defined below.
    headNode SparkClusterRolesHeadNode
    A head_node block as defined above.
    workerNode SparkClusterRolesWorkerNode
    A worker_node block as defined below.
    zookeeperNode SparkClusterRolesZookeeperNode
    A zookeeper_node block as defined below.
    headNode SparkClusterRolesHeadNode
    A head_node block as defined above.
    workerNode SparkClusterRolesWorkerNode
    A worker_node block as defined below.
    zookeeperNode SparkClusterRolesZookeeperNode
    A zookeeper_node block as defined below.
    head_node SparkClusterRolesHeadNode
    A head_node block as defined above.
    worker_node SparkClusterRolesWorkerNode
    A worker_node block as defined below.
    zookeeper_node SparkClusterRolesZookeeperNode
    A zookeeper_node block as defined below.
    headNode Property Map
    A head_node block as defined above.
    workerNode Property Map
    A worker_node block as defined below.
    zookeeperNode Property Map
    A zookeeper_node block as defined below.

    SparkClusterRolesHeadNode, SparkClusterRolesHeadNodeArgs

    Username string
    The Username of the local administrator for the Head Nodes. Changing this forces a new resource to be created.
    VmSize string
    The Size of the Virtual Machine which should be used as the Head Nodes. Changing this forces a new resource to be created.
    Password string
    The Password associated with the local administrator for the Head Nodes. Changing this forces a new resource to be created.
    SshKeys List<string>
    A list of SSH Keys which should be used for the local administrator on the Head Nodes. Changing this forces a new resource to be created.
    SubnetId string
    The ID of the Subnet within the Virtual Network where the Head Nodes should be provisioned. Changing this forces a new resource to be created.
    VirtualNetworkId string
    The ID of the Virtual Network where the Head Nodes should be provisioned. Changing this forces a new resource to be created.
    Username string
    The Username of the local administrator for the Head Nodes. Changing this forces a new resource to be created.
    VmSize string
    The Size of the Virtual Machine which should be used as the Head Nodes. Changing this forces a new resource to be created.
    Password string
    The Password associated with the local administrator for the Head Nodes. Changing this forces a new resource to be created.
    SshKeys []string
    A list of SSH Keys which should be used for the local administrator on the Head Nodes. Changing this forces a new resource to be created.
    SubnetId string
    The ID of the Subnet within the Virtual Network where the Head Nodes should be provisioned. Changing this forces a new resource to be created.
    VirtualNetworkId string
    The ID of the Virtual Network where the Head Nodes should be provisioned. Changing this forces a new resource to be created.
    username String
    The Username of the local administrator for the Head Nodes. Changing this forces a new resource to be created.
    vmSize String
    The Size of the Virtual Machine which should be used as the Head Nodes. Changing this forces a new resource to be created.
    password String
    The Password associated with the local administrator for the Head Nodes. Changing this forces a new resource to be created.
    sshKeys List<String>
    A list of SSH Keys which should be used for the local administrator on the Head Nodes. Changing this forces a new resource to be created.
    subnetId String
    The ID of the Subnet within the Virtual Network where the Head Nodes should be provisioned. Changing this forces a new resource to be created.
    virtualNetworkId String
    The ID of the Virtual Network where the Head Nodes should be provisioned. Changing this forces a new resource to be created.
    username string
    The Username of the local administrator for the Head Nodes. Changing this forces a new resource to be created.
    vmSize string
    The Size of the Virtual Machine which should be used as the Head Nodes. Changing this forces a new resource to be created.
    password string
    The Password associated with the local administrator for the Head Nodes. Changing this forces a new resource to be created.
    sshKeys string[]
    A list of SSH Keys which should be used for the local administrator on the Head Nodes. Changing this forces a new resource to be created.
    subnetId string
    The ID of the Subnet within the Virtual Network where the Head Nodes should be provisioned. Changing this forces a new resource to be created.
    virtualNetworkId string
    The ID of the Virtual Network where the Head Nodes should be provisioned. Changing this forces a new resource to be created.
    username str
    The Username of the local administrator for the Head Nodes. Changing this forces a new resource to be created.
    vm_size str
    The Size of the Virtual Machine which should be used as the Head Nodes. Changing this forces a new resource to be created.
    password str
    The Password associated with the local administrator for the Head Nodes. Changing this forces a new resource to be created.
    ssh_keys Sequence[str]
    A list of SSH Keys which should be used for the local administrator on the Head Nodes. Changing this forces a new resource to be created.
    subnet_id str
    The ID of the Subnet within the Virtual Network where the Head Nodes should be provisioned. Changing this forces a new resource to be created.
    virtual_network_id str
    The ID of the Virtual Network where the Head Nodes should be provisioned. Changing this forces a new resource to be created.
    username String
    The Username of the local administrator for the Head Nodes. Changing this forces a new resource to be created.
    vmSize String
    The Size of the Virtual Machine which should be used as the Head Nodes. Changing this forces a new resource to be created.
    password String
    The Password associated with the local administrator for the Head Nodes. Changing this forces a new resource to be created.
    sshKeys List<String>
    A list of SSH Keys which should be used for the local administrator on the Head Nodes. Changing this forces a new resource to be created.
    subnetId String
    The ID of the Subnet within the Virtual Network where the Head Nodes should be provisioned. Changing this forces a new resource to be created.
    virtualNetworkId String
    The ID of the Virtual Network where the Head Nodes should be provisioned. Changing this forces a new resource to be created.

    SparkClusterRolesWorkerNode, SparkClusterRolesWorkerNodeArgs

    TargetInstanceCount int
    The number of instances which should be run for the Worker Nodes.
    Username string
    The Username of the local administrator for the Worker Nodes. Changing this forces a new resource to be created.
    VmSize string
    The Size of the Virtual Machine which should be used as the Worker Nodes. Changing this forces a new resource to be created.
    Autoscale SparkClusterRolesWorkerNodeAutoscale
    An autoscale block as defined below.
    MinInstanceCount int
    The minimum number of instances which should be run for the Worker Nodes. Changing this forces a new resource to be created.

    Deprecated: this has been deprecated from the API and will be removed in version 3.0 of the provider

    Password string
    The Password associated with the local administrator for the Worker Nodes. Changing this forces a new resource to be created.
    SshKeys List<string>
    A list of SSH Keys which should be used for the local administrator on the Worker Nodes. Changing this forces a new resource to be created.
    SubnetId string
    The ID of the Subnet within the Virtual Network where the Worker Nodes should be provisioned. Changing this forces a new resource to be created.
    VirtualNetworkId string
    The ID of the Virtual Network where the Worker Nodes should be provisioned. Changing this forces a new resource to be created.
    TargetInstanceCount int
    The number of instances which should be run for the Worker Nodes.
    Username string
    The Username of the local administrator for the Worker Nodes. Changing this forces a new resource to be created.
    VmSize string
    The Size of the Virtual Machine which should be used as the Worker Nodes. Changing this forces a new resource to be created.
    Autoscale SparkClusterRolesWorkerNodeAutoscale
    An autoscale block as defined below.
    MinInstanceCount int
    The minimum number of instances which should be run for the Worker Nodes. Changing this forces a new resource to be created.

    Deprecated: this has been deprecated from the API and will be removed in version 3.0 of the provider

    Password string
    The Password associated with the local administrator for the Worker Nodes. Changing this forces a new resource to be created.
    SshKeys []string
    A list of SSH Keys which should be used for the local administrator on the Worker Nodes. Changing this forces a new resource to be created.
    SubnetId string
    The ID of the Subnet within the Virtual Network where the Worker Nodes should be provisioned. Changing this forces a new resource to be created.
    VirtualNetworkId string
    The ID of the Virtual Network where the Worker Nodes should be provisioned. Changing this forces a new resource to be created.
    targetInstanceCount Integer
    The number of instances which should be run for the Worker Nodes.
    username String
    The Username of the local administrator for the Worker Nodes. Changing this forces a new resource to be created.
    vmSize String
    The Size of the Virtual Machine which should be used as the Worker Nodes. Changing this forces a new resource to be created.
    autoscale SparkClusterRolesWorkerNodeAutoscale
    An autoscale block as defined below.
    minInstanceCount Integer
    The minimum number of instances which should be run for the Worker Nodes. Changing this forces a new resource to be created.

    Deprecated: this has been deprecated from the API and will be removed in version 3.0 of the provider

    password String
    The Password associated with the local administrator for the Worker Nodes. Changing this forces a new resource to be created.
    sshKeys List<String>
    A list of SSH Keys which should be used for the local administrator on the Worker Nodes. Changing this forces a new resource to be created.
    subnetId String
    The ID of the Subnet within the Virtual Network where the Worker Nodes should be provisioned. Changing this forces a new resource to be created.
    virtualNetworkId String
    The ID of the Virtual Network where the Worker Nodes should be provisioned. Changing this forces a new resource to be created.
    targetInstanceCount number
    The number of instances which should be run for the Worker Nodes.
    username string
    The Username of the local administrator for the Worker Nodes. Changing this forces a new resource to be created.
    vmSize string
    The Size of the Virtual Machine which should be used as the Worker Nodes. Changing this forces a new resource to be created.
    autoscale SparkClusterRolesWorkerNodeAutoscale
    An autoscale block as defined below.
    minInstanceCount number
    The minimum number of instances which should be run for the Worker Nodes. Changing this forces a new resource to be created.

    Deprecated: this has been deprecated from the API and will be removed in version 3.0 of the provider

    password string
    The Password associated with the local administrator for the Worker Nodes. Changing this forces a new resource to be created.
    sshKeys string[]
    A list of SSH Keys which should be used for the local administrator on the Worker Nodes. Changing this forces a new resource to be created.
    subnetId string
    The ID of the Subnet within the Virtual Network where the Worker Nodes should be provisioned. Changing this forces a new resource to be created.
    virtualNetworkId string
    The ID of the Virtual Network where the Worker Nodes should be provisioned. Changing this forces a new resource to be created.
    target_instance_count int
    The number of instances which should be run for the Worker Nodes.
    username str
    The Username of the local administrator for the Worker Nodes. Changing this forces a new resource to be created.
    vm_size str
    The Size of the Virtual Machine which should be used as the Worker Nodes. Changing this forces a new resource to be created.
    autoscale SparkClusterRolesWorkerNodeAutoscale
    An autoscale block as defined below.
    min_instance_count int
    The minimum number of instances which should be run for the Worker Nodes. Changing this forces a new resource to be created.

    Deprecated: this has been deprecated from the API and will be removed in version 3.0 of the provider

    password str
    The Password associated with the local administrator for the Worker Nodes. Changing this forces a new resource to be created.
    ssh_keys Sequence[str]
    A list of SSH Keys which should be used for the local administrator on the Worker Nodes. Changing this forces a new resource to be created.
    subnet_id str
    The ID of the Subnet within the Virtual Network where the Worker Nodes should be provisioned. Changing this forces a new resource to be created.
    virtual_network_id str
    The ID of the Virtual Network where the Worker Nodes should be provisioned. Changing this forces a new resource to be created.
    targetInstanceCount Number
    The number of instances which should be run for the Worker Nodes.
    username String
    The Username of the local administrator for the Worker Nodes. Changing this forces a new resource to be created.
    vmSize String
    The Size of the Virtual Machine which should be used as the Worker Nodes. Changing this forces a new resource to be created.
    autoscale Property Map
    An autoscale block as defined below.
    minInstanceCount Number
    The minimum number of instances which should be run for the Worker Nodes. Changing this forces a new resource to be created.

    Deprecated: this has been deprecated from the API and will be removed in version 3.0 of the provider

    password String
    The Password associated with the local administrator for the Worker Nodes. Changing this forces a new resource to be created.
    sshKeys List<String>
    A list of SSH Keys which should be used for the local administrator on the Worker Nodes. Changing this forces a new resource to be created.
    subnetId String
    The ID of the Subnet within the Virtual Network where the Worker Nodes should be provisioned. Changing this forces a new resource to be created.
    virtualNetworkId String
    The ID of the Virtual Network where the Worker Nodes should be provisioned. Changing this forces a new resource to be created.

    SparkClusterRolesWorkerNodeAutoscale, SparkClusterRolesWorkerNodeAutoscaleArgs

    Capacity SparkClusterRolesWorkerNodeAutoscaleCapacity
    A capacity block as defined below.
    Recurrence SparkClusterRolesWorkerNodeAutoscaleRecurrence
    A recurrence block as defined below.
    Capacity SparkClusterRolesWorkerNodeAutoscaleCapacity
    A capacity block as defined below.
    Recurrence SparkClusterRolesWorkerNodeAutoscaleRecurrence
    A recurrence block as defined below.
    capacity SparkClusterRolesWorkerNodeAutoscaleCapacity
    A capacity block as defined below.
    recurrence SparkClusterRolesWorkerNodeAutoscaleRecurrence
    A recurrence block as defined below.
    capacity SparkClusterRolesWorkerNodeAutoscaleCapacity
    A capacity block as defined below.
    recurrence SparkClusterRolesWorkerNodeAutoscaleRecurrence
    A recurrence block as defined below.
    capacity SparkClusterRolesWorkerNodeAutoscaleCapacity
    A capacity block as defined below.
    recurrence SparkClusterRolesWorkerNodeAutoscaleRecurrence
    A recurrence block as defined below.
    capacity Property Map
    A capacity block as defined below.
    recurrence Property Map
    A recurrence block as defined below.

    SparkClusterRolesWorkerNodeAutoscaleCapacity, SparkClusterRolesWorkerNodeAutoscaleCapacityArgs

    MaxInstanceCount int
    The maximum number of worker nodes to autoscale to based on the cluster's activity.
    MinInstanceCount int
    The minimum number of worker nodes to autoscale to based on the cluster's activity.
    MaxInstanceCount int
    The maximum number of worker nodes to autoscale to based on the cluster's activity.
    MinInstanceCount int
    The minimum number of worker nodes to autoscale to based on the cluster's activity.
    maxInstanceCount Integer
    The maximum number of worker nodes to autoscale to based on the cluster's activity.
    minInstanceCount Integer
    The minimum number of worker nodes to autoscale to based on the cluster's activity.
    maxInstanceCount number
    The maximum number of worker nodes to autoscale to based on the cluster's activity.
    minInstanceCount number
    The minimum number of worker nodes to autoscale to based on the cluster's activity.
    max_instance_count int
    The maximum number of worker nodes to autoscale to based on the cluster's activity.
    min_instance_count int
    The minimum number of worker nodes to autoscale to based on the cluster's activity.
    maxInstanceCount Number
    The maximum number of worker nodes to autoscale to based on the cluster's activity.
    minInstanceCount Number
    The minimum number of worker nodes to autoscale to based on the cluster's activity.

    SparkClusterRolesWorkerNodeAutoscaleRecurrence, SparkClusterRolesWorkerNodeAutoscaleRecurrenceArgs

    Schedules List<SparkClusterRolesWorkerNodeAutoscaleRecurrenceSchedule>
    A list of schedule blocks as defined below.
    Timezone string
    The time zone for the autoscale schedule times.
    Schedules []SparkClusterRolesWorkerNodeAutoscaleRecurrenceSchedule
    A list of schedule blocks as defined below.
    Timezone string
    The time zone for the autoscale schedule times.
    schedules List<SparkClusterRolesWorkerNodeAutoscaleRecurrenceSchedule>
    A list of schedule blocks as defined below.
    timezone String
    The time zone for the autoscale schedule times.
    schedules SparkClusterRolesWorkerNodeAutoscaleRecurrenceSchedule[]
    A list of schedule blocks as defined below.
    timezone string
    The time zone for the autoscale schedule times.
    schedules Sequence[SparkClusterRolesWorkerNodeAutoscaleRecurrenceSchedule]
    A list of schedule blocks as defined below.
    timezone str
    The time zone for the autoscale schedule times.
    schedules List<Property Map>
    A list of schedule blocks as defined below.
    timezone String
    The time zone for the autoscale schedule times.

    SparkClusterRolesWorkerNodeAutoscaleRecurrenceSchedule, SparkClusterRolesWorkerNodeAutoscaleRecurrenceScheduleArgs

    Days List<string>
    The days of the week to perform autoscale.
    TargetInstanceCount int
    The number of worker nodes to autoscale at the specified time.
    Time string
    The time of day to perform the autoscale in 24-hour format.
    Days []string
    The days of the week to perform autoscale.
    TargetInstanceCount int
    The number of worker nodes to autoscale at the specified time.
    Time string
    The time of day to perform the autoscale in 24-hour format.
    days List<String>
    The days of the week to perform autoscale.
    targetInstanceCount Integer
    The number of worker nodes to autoscale at the specified time.
    time String
    The time of day to perform the autoscale in 24-hour format.
    days string[]
    The days of the week to perform autoscale.
    targetInstanceCount number
    The number of worker nodes to autoscale at the specified time.
    time string
    The time of day to perform the autoscale in 24-hour format.
    days Sequence[str]
    The days of the week to perform autoscale.
    target_instance_count int
    The number of worker nodes to autoscale at the specified time.
    time str
    The time of day to perform the autoscale in 24-hour format.
    days List<String>
    The days of the week to perform autoscale.
    targetInstanceCount Number
    The number of worker nodes to autoscale at the specified time.
    time String
    The time of day to perform the autoscale in 24-hour format.

    SparkClusterRolesZookeeperNode, SparkClusterRolesZookeeperNodeArgs

    Username string
    The Username of the local administrator for the Zookeeper Nodes. Changing this forces a new resource to be created.
    VmSize string
    The Size of the Virtual Machine which should be used as the Zookeeper Nodes. Changing this forces a new resource to be created.
    Password string
    The Password associated with the local administrator for the Zookeeper Nodes. Changing this forces a new resource to be created.
    SshKeys List<string>
    A list of SSH Keys which should be used for the local administrator on the Zookeeper Nodes. Changing this forces a new resource to be created.
    SubnetId string
    The ID of the Subnet within the Virtual Network where the Zookeeper Nodes should be provisioned. Changing this forces a new resource to be created.
    VirtualNetworkId string
    The ID of the Virtual Network where the Zookeeper Nodes should be provisioned. Changing this forces a new resource to be created.
    Username string
    The Username of the local administrator for the Zookeeper Nodes. Changing this forces a new resource to be created.
    VmSize string
    The Size of the Virtual Machine which should be used as the Zookeeper Nodes. Changing this forces a new resource to be created.
    Password string
    The Password associated with the local administrator for the Zookeeper Nodes. Changing this forces a new resource to be created.
    SshKeys []string
    A list of SSH Keys which should be used for the local administrator on the Zookeeper Nodes. Changing this forces a new resource to be created.
    SubnetId string
    The ID of the Subnet within the Virtual Network where the Zookeeper Nodes should be provisioned. Changing this forces a new resource to be created.
    VirtualNetworkId string
    The ID of the Virtual Network where the Zookeeper Nodes should be provisioned. Changing this forces a new resource to be created.
    username String
    The Username of the local administrator for the Zookeeper Nodes. Changing this forces a new resource to be created.
    vmSize String
    The Size of the Virtual Machine which should be used as the Zookeeper Nodes. Changing this forces a new resource to be created.
    password String
    The Password associated with the local administrator for the Zookeeper Nodes. Changing this forces a new resource to be created.
    sshKeys List<String>
    A list of SSH Keys which should be used for the local administrator on the Zookeeper Nodes. Changing this forces a new resource to be created.
    subnetId String
    The ID of the Subnet within the Virtual Network where the Zookeeper Nodes should be provisioned. Changing this forces a new resource to be created.
    virtualNetworkId String
    The ID of the Virtual Network where the Zookeeper Nodes should be provisioned. Changing this forces a new resource to be created.
    username string
    The Username of the local administrator for the Zookeeper Nodes. Changing this forces a new resource to be created.
    vmSize string
    The Size of the Virtual Machine which should be used as the Zookeeper Nodes. Changing this forces a new resource to be created.
    password string
    The Password associated with the local administrator for the Zookeeper Nodes. Changing this forces a new resource to be created.
    sshKeys string[]
    A list of SSH Keys which should be used for the local administrator on the Zookeeper Nodes. Changing this forces a new resource to be created.
    subnetId string
    The ID of the Subnet within the Virtual Network where the Zookeeper Nodes should be provisioned. Changing this forces a new resource to be created.
    virtualNetworkId string
    The ID of the Virtual Network where the Zookeeper Nodes should be provisioned. Changing this forces a new resource to be created.
    username str
    The Username of the local administrator for the Zookeeper Nodes. Changing this forces a new resource to be created.
    vm_size str
    The Size of the Virtual Machine which should be used as the Zookeeper Nodes. Changing this forces a new resource to be created.
    password str
    The Password associated with the local administrator for the Zookeeper Nodes. Changing this forces a new resource to be created.
    ssh_keys Sequence[str]
    A list of SSH Keys which should be used for the local administrator on the Zookeeper Nodes. Changing this forces a new resource to be created.
    subnet_id str
    The ID of the Subnet within the Virtual Network where the Zookeeper Nodes should be provisioned. Changing this forces a new resource to be created.
    virtual_network_id str
    The ID of the Virtual Network where the Zookeeper Nodes should be provisioned. Changing this forces a new resource to be created.
    username String
    The Username of the local administrator for the Zookeeper Nodes. Changing this forces a new resource to be created.
    vmSize String
    The Size of the Virtual Machine which should be used as the Zookeeper Nodes. Changing this forces a new resource to be created.
    password String
    The Password associated with the local administrator for the Zookeeper Nodes. Changing this forces a new resource to be created.
    sshKeys List<String>
    A list of SSH Keys which should be used for the local administrator on the Zookeeper Nodes. Changing this forces a new resource to be created.
    subnetId String
    The ID of the Subnet within the Virtual Network where the Zookeeper Nodes should be provisioned. Changing this forces a new resource to be created.
    virtualNetworkId String
    The ID of the Virtual Network where the Zookeeper Nodes should be provisioned. Changing this forces a new resource to be created.

    SparkClusterSecurityProfile, SparkClusterSecurityProfileArgs

    AaddsResourceId string
    The resource ID of the Azure Active Directory Domain Service. Changing this forces a new resource to be created.
    DomainName string
    The name of the Azure Active Directory Domain. Changing this forces a new resource to be created.
    DomainUserPassword string
    The user password of the Azure Active Directory Domain. Changing this forces a new resource to be created.
    DomainUsername string
    The username of the Azure Active Directory Domain. Changing this forces a new resource to be created.
    LdapsUrls List<string>
    A list of the LDAPS URLs to communicate with the Azure Active Directory. Changing this forces a new resource to be created.
    MsiResourceId string
    The User Assigned Identity for the HDInsight Cluster. Changing this forces a new resource to be created.
    ClusterUsersGroupDns List<string>
    A list of the distinguished names for the cluster user groups. Changing this forces a new resource to be created.
    AaddsResourceId string
    The resource ID of the Azure Active Directory Domain Service. Changing this forces a new resource to be created.
    DomainName string
    The name of the Azure Active Directory Domain. Changing this forces a new resource to be created.
    DomainUserPassword string
    The user password of the Azure Active Directory Domain. Changing this forces a new resource to be created.
    DomainUsername string
    The username of the Azure Active Directory Domain. Changing this forces a new resource to be created.
    LdapsUrls []string
    A list of the LDAPS URLs to communicate with the Azure Active Directory. Changing this forces a new resource to be created.
    MsiResourceId string
    The User Assigned Identity for the HDInsight Cluster. Changing this forces a new resource to be created.
    ClusterUsersGroupDns []string
    A list of the distinguished names for the cluster user groups. Changing this forces a new resource to be created.
    aaddsResourceId String
    The resource ID of the Azure Active Directory Domain Service. Changing this forces a new resource to be created.
    domainName String
    The name of the Azure Active Directory Domain. Changing this forces a new resource to be created.
    domainUserPassword String
    The user password of the Azure Active Directory Domain. Changing this forces a new resource to be created.
    domainUsername String
    The username of the Azure Active Directory Domain. Changing this forces a new resource to be created.
    ldapsUrls List<String>
    A list of the LDAPS URLs to communicate with the Azure Active Directory. Changing this forces a new resource to be created.
    msiResourceId String
    The User Assigned Identity for the HDInsight Cluster. Changing this forces a new resource to be created.
    clusterUsersGroupDns List<String>
    A list of the distinguished names for the cluster user groups. Changing this forces a new resource to be created.
    aaddsResourceId string
    The resource ID of the Azure Active Directory Domain Service. Changing this forces a new resource to be created.
    domainName string
    The name of the Azure Active Directory Domain. Changing this forces a new resource to be created.
    domainUserPassword string
    The user password of the Azure Active Directory Domain. Changing this forces a new resource to be created.
    domainUsername string
    The username of the Azure Active Directory Domain. Changing this forces a new resource to be created.
    ldapsUrls string[]
    A list of the LDAPS URLs to communicate with the Azure Active Directory. Changing this forces a new resource to be created.
    msiResourceId string
    The User Assigned Identity for the HDInsight Cluster. Changing this forces a new resource to be created.
    clusterUsersGroupDns string[]
    A list of the distinguished names for the cluster user groups. Changing this forces a new resource to be created.
    aadds_resource_id str
    The resource ID of the Azure Active Directory Domain Service. Changing this forces a new resource to be created.
    domain_name str
    The name of the Azure Active Directory Domain. Changing this forces a new resource to be created.
    domain_user_password str
    The user password of the Azure Active Directory Domain. Changing this forces a new resource to be created.
    domain_username str
    The username of the Azure Active Directory Domain. Changing this forces a new resource to be created.
    ldaps_urls Sequence[str]
    A list of the LDAPS URLs to communicate with the Azure Active Directory. Changing this forces a new resource to be created.
    msi_resource_id str
    The User Assigned Identity for the HDInsight Cluster. Changing this forces a new resource to be created.
    cluster_users_group_dns Sequence[str]
    A list of the distinguished names for the cluster user groups. Changing this forces a new resource to be created.
    aaddsResourceId String
    The resource ID of the Azure Active Directory Domain Service. Changing this forces a new resource to be created.
    domainName String
    The name of the Azure Active Directory Domain. Changing this forces a new resource to be created.
    domainUserPassword String
    The user password of the Azure Active Directory Domain. Changing this forces a new resource to be created.
    domainUsername String
    The username of the Azure Active Directory Domain. Changing this forces a new resource to be created.
    ldapsUrls List<String>
    A list of the LDAPS URLs to communicate with the Azure Active Directory. Changing this forces a new resource to be created.
    msiResourceId String
    The User Assigned Identity for the HDInsight Cluster. Changing this forces a new resource to be created.
    clusterUsersGroupDns List<String>
    A list of the distinguished names for the cluster user groups. Changing this forces a new resource to be created.

    SparkClusterStorageAccount, SparkClusterStorageAccountArgs

    IsDefault bool
    Is this the Default Storage Account for the HDInsight Hadoop Cluster? Changing this forces a new resource to be created.
    StorageAccountKey string
    The Access Key which should be used to connect to the Storage Account. Changing this forces a new resource to be created.
    StorageContainerId string
    The ID of the Storage Container. Changing this forces a new resource to be created.
    StorageResourceId string
    The ID of the Storage Account. Changing this forces a new resource to be created.
    IsDefault bool
    Is this the Default Storage Account for the HDInsight Hadoop Cluster? Changing this forces a new resource to be created.
    StorageAccountKey string
    The Access Key which should be used to connect to the Storage Account. Changing this forces a new resource to be created.
    StorageContainerId string
    The ID of the Storage Container. Changing this forces a new resource to be created.
    StorageResourceId string
    The ID of the Storage Account. Changing this forces a new resource to be created.
    isDefault Boolean
    Is this the Default Storage Account for the HDInsight Hadoop Cluster? Changing this forces a new resource to be created.
    storageAccountKey String
    The Access Key which should be used to connect to the Storage Account. Changing this forces a new resource to be created.
    storageContainerId String
    The ID of the Storage Container. Changing this forces a new resource to be created.
    storageResourceId String
    The ID of the Storage Account. Changing this forces a new resource to be created.
    isDefault boolean
    Is this the Default Storage Account for the HDInsight Hadoop Cluster? Changing this forces a new resource to be created.
    storageAccountKey string
    The Access Key which should be used to connect to the Storage Account. Changing this forces a new resource to be created.
    storageContainerId string
    The ID of the Storage Container. Changing this forces a new resource to be created.
    storageResourceId string
    The ID of the Storage Account. Changing this forces a new resource to be created.
    is_default bool
    Is this the Default Storage Account for the HDInsight Hadoop Cluster? Changing this forces a new resource to be created.
    storage_account_key str
    The Access Key which should be used to connect to the Storage Account. Changing this forces a new resource to be created.
    storage_container_id str
    The ID of the Storage Container. Changing this forces a new resource to be created.
    storage_resource_id str
    The ID of the Storage Account. Changing this forces a new resource to be created.
    isDefault Boolean
    Is this the Default Storage Account for the HDInsight Hadoop Cluster? Changing this forces a new resource to be created.
    storageAccountKey String
    The Access Key which should be used to connect to the Storage Account. Changing this forces a new resource to be created.
    storageContainerId String
    The ID of the Storage Container. Changing this forces a new resource to be created.
    storageResourceId String
    The ID of the Storage Account. Changing this forces a new resource to be created.

    SparkClusterStorageAccountGen2, SparkClusterStorageAccountGen2Args

    FilesystemId string
    The ID of the Gen2 Filesystem. Changing this forces a new resource to be created.
    IsDefault bool
    Is this the Default Storage Account for the HDInsight Hadoop Cluster? Changing this forces a new resource to be created.
    ManagedIdentityResourceId string
    The ID of Managed Identity to use for accessing the Gen2 filesystem. Changing this forces a new resource to be created.
    StorageResourceId string
    The ID of the Storage Account. Changing this forces a new resource to be created.
    FilesystemId string
    The ID of the Gen2 Filesystem. Changing this forces a new resource to be created.
    IsDefault bool
    Is this the Default Storage Account for the HDInsight Hadoop Cluster? Changing this forces a new resource to be created.
    ManagedIdentityResourceId string
    The ID of Managed Identity to use for accessing the Gen2 filesystem. Changing this forces a new resource to be created.
    StorageResourceId string
    The ID of the Storage Account. Changing this forces a new resource to be created.
    filesystemId String
    The ID of the Gen2 Filesystem. Changing this forces a new resource to be created.
    isDefault Boolean
    Is this the Default Storage Account for the HDInsight Hadoop Cluster? Changing this forces a new resource to be created.
    managedIdentityResourceId String
    The ID of Managed Identity to use for accessing the Gen2 filesystem. Changing this forces a new resource to be created.
    storageResourceId String
    The ID of the Storage Account. Changing this forces a new resource to be created.
    filesystemId string
    The ID of the Gen2 Filesystem. Changing this forces a new resource to be created.
    isDefault boolean
    Is this the Default Storage Account for the HDInsight Hadoop Cluster? Changing this forces a new resource to be created.
    managedIdentityResourceId string
    The ID of Managed Identity to use for accessing the Gen2 filesystem. Changing this forces a new resource to be created.
    storageResourceId string
    The ID of the Storage Account. Changing this forces a new resource to be created.
    filesystem_id str
    The ID of the Gen2 Filesystem. Changing this forces a new resource to be created.
    is_default bool
    Is this the Default Storage Account for the HDInsight Hadoop Cluster? Changing this forces a new resource to be created.
    managed_identity_resource_id str
    The ID of Managed Identity to use for accessing the Gen2 filesystem. Changing this forces a new resource to be created.
    storage_resource_id str
    The ID of the Storage Account. Changing this forces a new resource to be created.
    filesystemId String
    The ID of the Gen2 Filesystem. Changing this forces a new resource to be created.
    isDefault Boolean
    Is this the Default Storage Account for the HDInsight Hadoop Cluster? Changing this forces a new resource to be created.
    managedIdentityResourceId String
    The ID of Managed Identity to use for accessing the Gen2 filesystem. Changing this forces a new resource to be created.
    storageResourceId String
    The ID of the Storage Account. Changing this forces a new resource to be created.

    Import

    HDInsight Spark Clusters can be imported using the resource id, e.g.

     $ pulumi import azure:hdinsight/sparkCluster:SparkCluster example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/mygroup1/providers/Microsoft.HDInsight/clusters/cluster1
    

    To learn more about importing existing cloud resources, see Importing resources.

    Package Details

    Repository
    Azure Classic pulumi/pulumi-azure
    License
    Apache-2.0
    Notes
    This Pulumi package is based on the azurerm Terraform Provider.
    azure logo

    We recommend using Azure Native.

    Viewing docs for Azure v4.42.0 (Older version)
    published on Monday, Mar 9, 2026 by Pulumi
      Try Pulumi Cloud free. Your team will thank you.