
We recommend using Azure Native.

Azure Classic v5.69.0 published on Thursday, Mar 14, 2024 by Pulumi

azure.hdinsight.SparkCluster


    Manages an HDInsight Spark Cluster.

    Example Usage

    import * as pulumi from "@pulumi/pulumi";
    import * as azure from "@pulumi/azure";
    
    const example = new azure.core.ResourceGroup("example", {
        name: "example-resources",
        location: "West Europe",
    });
    const exampleAccount = new azure.storage.Account("example", {
        name: "hdinsightstor",
        resourceGroupName: example.name,
        location: example.location,
        accountTier: "Standard",
        accountReplicationType: "LRS",
    });
    const exampleContainer = new azure.storage.Container("example", {
        name: "hdinsight",
        storageAccountName: exampleAccount.name,
        containerAccessType: "private",
    });
    const exampleSparkCluster = new azure.hdinsight.SparkCluster("example", {
        name: "example-hdicluster",
        resourceGroupName: example.name,
        location: example.location,
        clusterVersion: "3.6",
        tier: "Standard",
        componentVersion: {
            spark: "2.3",
        },
        gateway: {
            username: "acctestusrgw",
            password: "Password123!",
        },
        storageAccounts: [{
            storageContainerId: exampleContainer.id,
            storageAccountKey: exampleAccount.primaryAccessKey,
            isDefault: true,
        }],
        roles: {
            headNode: {
                vmSize: "Standard_A3",
                username: "acctestusrvm",
                password: "AccTestvdSC4daf986!",
            },
            workerNode: {
                vmSize: "Standard_A3",
                username: "acctestusrvm",
                password: "AccTestvdSC4daf986!",
                targetInstanceCount: 3,
            },
            zookeeperNode: {
                vmSize: "Medium",
                username: "acctestusrvm",
                password: "AccTestvdSC4daf986!",
            },
        },
    });
    
    import pulumi
    import pulumi_azure as azure
    
    example = azure.core.ResourceGroup("example",
        name="example-resources",
        location="West Europe")
    example_account = azure.storage.Account("example",
        name="hdinsightstor",
        resource_group_name=example.name,
        location=example.location,
        account_tier="Standard",
        account_replication_type="LRS")
    example_container = azure.storage.Container("example",
        name="hdinsight",
        storage_account_name=example_account.name,
        container_access_type="private")
    example_spark_cluster = azure.hdinsight.SparkCluster("example",
        name="example-hdicluster",
        resource_group_name=example.name,
        location=example.location,
        cluster_version="3.6",
        tier="Standard",
        component_version=azure.hdinsight.SparkClusterComponentVersionArgs(
            spark="2.3",
        ),
        gateway=azure.hdinsight.SparkClusterGatewayArgs(
            username="acctestusrgw",
            password="Password123!",
        ),
        storage_accounts=[azure.hdinsight.SparkClusterStorageAccountArgs(
            storage_container_id=example_container.id,
            storage_account_key=example_account.primary_access_key,
            is_default=True,
        )],
        roles=azure.hdinsight.SparkClusterRolesArgs(
            head_node=azure.hdinsight.SparkClusterRolesHeadNodeArgs(
                vm_size="Standard_A3",
                username="acctestusrvm",
                password="AccTestvdSC4daf986!",
            ),
            worker_node=azure.hdinsight.SparkClusterRolesWorkerNodeArgs(
                vm_size="Standard_A3",
                username="acctestusrvm",
                password="AccTestvdSC4daf986!",
                target_instance_count=3,
            ),
            zookeeper_node=azure.hdinsight.SparkClusterRolesZookeeperNodeArgs(
                vm_size="Medium",
                username="acctestusrvm",
                password="AccTestvdSC4daf986!",
            ),
        ))
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-azure/sdk/v5/go/azure/core"
    	"github.com/pulumi/pulumi-azure/sdk/v5/go/azure/hdinsight"
    	"github.com/pulumi/pulumi-azure/sdk/v5/go/azure/storage"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		example, err := core.NewResourceGroup(ctx, "example", &core.ResourceGroupArgs{
    			Name:     pulumi.String("example-resources"),
    			Location: pulumi.String("West Europe"),
    		})
    		if err != nil {
    			return err
    		}
    		exampleAccount, err := storage.NewAccount(ctx, "example", &storage.AccountArgs{
    			Name:                   pulumi.String("hdinsightstor"),
    			ResourceGroupName:      example.Name,
    			Location:               example.Location,
    			AccountTier:            pulumi.String("Standard"),
    			AccountReplicationType: pulumi.String("LRS"),
    		})
    		if err != nil {
    			return err
    		}
    		exampleContainer, err := storage.NewContainer(ctx, "example", &storage.ContainerArgs{
    			Name:                pulumi.String("hdinsight"),
    			StorageAccountName:  exampleAccount.Name,
    			ContainerAccessType: pulumi.String("private"),
    		})
    		if err != nil {
    			return err
    		}
    		_, err = hdinsight.NewSparkCluster(ctx, "example", &hdinsight.SparkClusterArgs{
    			Name:              pulumi.String("example-hdicluster"),
    			ResourceGroupName: example.Name,
    			Location:          example.Location,
    			ClusterVersion:    pulumi.String("3.6"),
    			Tier:              pulumi.String("Standard"),
    			ComponentVersion: &hdinsight.SparkClusterComponentVersionArgs{
    				Spark: pulumi.String("2.3"),
    			},
    			Gateway: &hdinsight.SparkClusterGatewayArgs{
    				Username: pulumi.String("acctestusrgw"),
    				Password: pulumi.String("Password123!"),
    			},
    			StorageAccounts: hdinsight.SparkClusterStorageAccountArray{
    				&hdinsight.SparkClusterStorageAccountArgs{
    					StorageContainerId: exampleContainer.ID(),
    					StorageAccountKey:  exampleAccount.PrimaryAccessKey,
    					IsDefault:          pulumi.Bool(true),
    				},
    			},
    			Roles: &hdinsight.SparkClusterRolesArgs{
    				HeadNode: &hdinsight.SparkClusterRolesHeadNodeArgs{
    					VmSize:   pulumi.String("Standard_A3"),
    					Username: pulumi.String("acctestusrvm"),
    					Password: pulumi.String("AccTestvdSC4daf986!"),
    				},
    				WorkerNode: &hdinsight.SparkClusterRolesWorkerNodeArgs{
    					VmSize:              pulumi.String("Standard_A3"),
    					Username:            pulumi.String("acctestusrvm"),
    					Password:            pulumi.String("AccTestvdSC4daf986!"),
    					TargetInstanceCount: pulumi.Int(3),
    				},
    				ZookeeperNode: &hdinsight.SparkClusterRolesZookeeperNodeArgs{
    					VmSize:   pulumi.String("Medium"),
    					Username: pulumi.String("acctestusrvm"),
    					Password: pulumi.String("AccTestvdSC4daf986!"),
    				},
    			},
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Azure = Pulumi.Azure;
    
    return await Deployment.RunAsync(() => 
    {
        var example = new Azure.Core.ResourceGroup("example", new()
        {
            Name = "example-resources",
            Location = "West Europe",
        });
    
        var exampleAccount = new Azure.Storage.Account("example", new()
        {
            Name = "hdinsightstor",
            ResourceGroupName = example.Name,
            Location = example.Location,
            AccountTier = "Standard",
            AccountReplicationType = "LRS",
        });
    
        var exampleContainer = new Azure.Storage.Container("example", new()
        {
            Name = "hdinsight",
            StorageAccountName = exampleAccount.Name,
            ContainerAccessType = "private",
        });
    
        var exampleSparkCluster = new Azure.HDInsight.SparkCluster("example", new()
        {
            Name = "example-hdicluster",
            ResourceGroupName = example.Name,
            Location = example.Location,
            ClusterVersion = "3.6",
            Tier = "Standard",
            ComponentVersion = new Azure.HDInsight.Inputs.SparkClusterComponentVersionArgs
            {
                Spark = "2.3",
            },
            Gateway = new Azure.HDInsight.Inputs.SparkClusterGatewayArgs
            {
                Username = "acctestusrgw",
                Password = "Password123!",
            },
            StorageAccounts = new[]
            {
                new Azure.HDInsight.Inputs.SparkClusterStorageAccountArgs
                {
                    StorageContainerId = exampleContainer.Id,
                    StorageAccountKey = exampleAccount.PrimaryAccessKey,
                    IsDefault = true,
                },
            },
            Roles = new Azure.HDInsight.Inputs.SparkClusterRolesArgs
            {
                HeadNode = new Azure.HDInsight.Inputs.SparkClusterRolesHeadNodeArgs
                {
                    VmSize = "Standard_A3",
                    Username = "acctestusrvm",
                    Password = "AccTestvdSC4daf986!",
                },
                WorkerNode = new Azure.HDInsight.Inputs.SparkClusterRolesWorkerNodeArgs
                {
                    VmSize = "Standard_A3",
                    Username = "acctestusrvm",
                    Password = "AccTestvdSC4daf986!",
                    TargetInstanceCount = 3,
                },
                ZookeeperNode = new Azure.HDInsight.Inputs.SparkClusterRolesZookeeperNodeArgs
                {
                    VmSize = "Medium",
                    Username = "acctestusrvm",
                    Password = "AccTestvdSC4daf986!",
                },
            },
        });
    
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.azure.core.ResourceGroup;
    import com.pulumi.azure.core.ResourceGroupArgs;
    import com.pulumi.azure.storage.Account;
    import com.pulumi.azure.storage.AccountArgs;
    import com.pulumi.azure.storage.Container;
    import com.pulumi.azure.storage.ContainerArgs;
    import com.pulumi.azure.hdinsight.SparkCluster;
    import com.pulumi.azure.hdinsight.SparkClusterArgs;
    import com.pulumi.azure.hdinsight.inputs.SparkClusterComponentVersionArgs;
    import com.pulumi.azure.hdinsight.inputs.SparkClusterGatewayArgs;
    import com.pulumi.azure.hdinsight.inputs.SparkClusterStorageAccountArgs;
    import com.pulumi.azure.hdinsight.inputs.SparkClusterRolesArgs;
    import com.pulumi.azure.hdinsight.inputs.SparkClusterRolesHeadNodeArgs;
    import com.pulumi.azure.hdinsight.inputs.SparkClusterRolesWorkerNodeArgs;
    import com.pulumi.azure.hdinsight.inputs.SparkClusterRolesZookeeperNodeArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var example = new ResourceGroup("example", ResourceGroupArgs.builder()        
                .name("example-resources")
                .location("West Europe")
                .build());
    
            var exampleAccount = new Account("exampleAccount", AccountArgs.builder()        
                .name("hdinsightstor")
                .resourceGroupName(example.name())
                .location(example.location())
                .accountTier("Standard")
                .accountReplicationType("LRS")
                .build());
    
            var exampleContainer = new Container("exampleContainer", ContainerArgs.builder()        
                .name("hdinsight")
                .storageAccountName(exampleAccount.name())
                .containerAccessType("private")
                .build());
    
            var exampleSparkCluster = new SparkCluster("exampleSparkCluster", SparkClusterArgs.builder()        
                .name("example-hdicluster")
                .resourceGroupName(example.name())
                .location(example.location())
                .clusterVersion("3.6")
                .tier("Standard")
                .componentVersion(SparkClusterComponentVersionArgs.builder()
                    .spark("2.3")
                    .build())
                .gateway(SparkClusterGatewayArgs.builder()
                    .username("acctestusrgw")
                    .password("Password123!")
                    .build())
                .storageAccounts(SparkClusterStorageAccountArgs.builder()
                    .storageContainerId(exampleContainer.id())
                    .storageAccountKey(exampleAccount.primaryAccessKey())
                    .isDefault(true)
                    .build())
                .roles(SparkClusterRolesArgs.builder()
                    .headNode(SparkClusterRolesHeadNodeArgs.builder()
                        .vmSize("Standard_A3")
                        .username("acctestusrvm")
                        .password("AccTestvdSC4daf986!")
                        .build())
                    .workerNode(SparkClusterRolesWorkerNodeArgs.builder()
                        .vmSize("Standard_A3")
                        .username("acctestusrvm")
                        .password("AccTestvdSC4daf986!")
                        .targetInstanceCount(3)
                        .build())
                    .zookeeperNode(SparkClusterRolesZookeeperNodeArgs.builder()
                        .vmSize("Medium")
                        .username("acctestusrvm")
                        .password("AccTestvdSC4daf986!")
                        .build())
                    .build())
                .build());
    
        }
    }
    
    resources:
      example:
        type: azure:core:ResourceGroup
        properties:
          name: example-resources
          location: West Europe
      exampleAccount:
        type: azure:storage:Account
        name: example
        properties:
          name: hdinsightstor
          resourceGroupName: ${example.name}
          location: ${example.location}
          accountTier: Standard
          accountReplicationType: LRS
      exampleContainer:
        type: azure:storage:Container
        name: example
        properties:
          name: hdinsight
          storageAccountName: ${exampleAccount.name}
          containerAccessType: private
      exampleSparkCluster:
        type: azure:hdinsight:SparkCluster
        name: example
        properties:
          name: example-hdicluster
          resourceGroupName: ${example.name}
          location: ${example.location}
          clusterVersion: '3.6'
          tier: Standard
          componentVersion:
            spark: '2.3'
          gateway:
            username: acctestusrgw
            password: Password123!
          storageAccounts:
            - storageContainerId: ${exampleContainer.id}
              storageAccountKey: ${exampleAccount.primaryAccessKey}
              isDefault: true
          roles:
            headNode:
              vmSize: Standard_A3
              username: acctestusrvm
              password: AccTestvdSC4daf986!
            workerNode:
              vmSize: Standard_A3
              username: acctestusrvm
              password: AccTestvdSC4daf986!
              targetInstanceCount: 3
            zookeeperNode:
              vmSize: Medium
              username: acctestusrvm
              password: AccTestvdSC4daf986!
    

    Create SparkCluster Resource

    new SparkCluster(name: string, args: SparkClusterArgs, opts?: CustomResourceOptions);
    @overload
    def SparkCluster(resource_name: str,
                     opts: Optional[ResourceOptions] = None,
                     cluster_version: Optional[str] = None,
                     component_version: Optional[SparkClusterComponentVersionArgs] = None,
                     compute_isolation: Optional[SparkClusterComputeIsolationArgs] = None,
                     disk_encryptions: Optional[Sequence[SparkClusterDiskEncryptionArgs]] = None,
                     encryption_in_transit_enabled: Optional[bool] = None,
                     extension: Optional[SparkClusterExtensionArgs] = None,
                     gateway: Optional[SparkClusterGatewayArgs] = None,
                     location: Optional[str] = None,
                     metastores: Optional[SparkClusterMetastoresArgs] = None,
                     monitor: Optional[SparkClusterMonitorArgs] = None,
                     name: Optional[str] = None,
                     network: Optional[SparkClusterNetworkArgs] = None,
                     resource_group_name: Optional[str] = None,
                     roles: Optional[SparkClusterRolesArgs] = None,
                     security_profile: Optional[SparkClusterSecurityProfileArgs] = None,
                     storage_account_gen2: Optional[SparkClusterStorageAccountGen2Args] = None,
                     storage_accounts: Optional[Sequence[SparkClusterStorageAccountArgs]] = None,
                     tags: Optional[Mapping[str, str]] = None,
                     tier: Optional[str] = None,
                     tls_min_version: Optional[str] = None)
    @overload
    def SparkCluster(resource_name: str,
                     args: SparkClusterArgs,
                     opts: Optional[ResourceOptions] = None)
    func NewSparkCluster(ctx *Context, name string, args SparkClusterArgs, opts ...ResourceOption) (*SparkCluster, error)
    public SparkCluster(string name, SparkClusterArgs args, CustomResourceOptions? opts = null)
    public SparkCluster(String name, SparkClusterArgs args)
    public SparkCluster(String name, SparkClusterArgs args, CustomResourceOptions options)
    
    type: azure:hdinsight:SparkCluster
    properties: # The arguments to resource properties.
    options: # Bag of options to control resource's behavior.
    
    
    name string
    The unique name of the resource.
    args SparkClusterArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    resource_name str
    The unique name of the resource.
    args SparkClusterArgs
    The arguments to resource properties.
    opts ResourceOptions
    Bag of options to control resource's behavior.
    ctx Context
    Context object for the current deployment.
    name string
    The unique name of the resource.
    args SparkClusterArgs
    The arguments to resource properties.
    opts ResourceOption
    Bag of options to control resource's behavior.
    name string
    The unique name of the resource.
    args SparkClusterArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    name String
    The unique name of the resource.
    args SparkClusterArgs
    The arguments to resource properties.
    options CustomResourceOptions
    Bag of options to control resource's behavior.
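
    For example, a hedged TypeScript sketch of passing resource options alongside the arguments; `sparkClusterArgs` is a placeholder standing in for the full argument object shown in the Example Usage above:

    import * as azure from "@pulumi/azure";
    
    // Placeholder declaration: in a real program this would be the full argument
    // object from the Example Usage above.
    declare const sparkClusterArgs: azure.hdinsight.SparkClusterArgs;
    
    const cluster = new azure.hdinsight.SparkCluster("example", sparkClusterArgs, {
        protect: true,            // refuse to delete the cluster until protection is removed
        ignoreChanges: ["tags"],  // leave externally managed tags untouched on update
    });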

    SparkCluster Resource Properties

    To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

    Inputs

    The SparkCluster resource accepts the following input properties:

    ClusterVersion string
    Specifies the Version of HDInsight which should be used for this Cluster. Changing this forces a new resource to be created.
    ComponentVersion SparkClusterComponentVersion
    A component_version block as defined below.
    Gateway SparkClusterGateway
    A gateway block as defined below.
    ResourceGroupName string
    Specifies the name of the Resource Group in which this HDInsight Spark Cluster should exist. Changing this forces a new resource to be created.
    Roles SparkClusterRoles
    A roles block as defined below.
    Tier string
    Specifies the Tier which should be used for this HDInsight Spark Cluster. Possible values are Standard or Premium. Changing this forces a new resource to be created.
    ComputeIsolation SparkClusterComputeIsolation
    A compute_isolation block as defined below.
    DiskEncryptions List<SparkClusterDiskEncryption>
    One or more disk_encryption blocks as defined below.
    EncryptionInTransitEnabled bool
    Whether encryption in transit is enabled for this Cluster. Changing this forces a new resource to be created.
    Extension SparkClusterExtension
    An extension block as defined below.
    Location string
    Specifies the Azure Region in which this HDInsight Spark Cluster should exist. Changing this forces a new resource to be created.
    Metastores SparkClusterMetastores
    A metastores block as defined below.
    Monitor SparkClusterMonitor
    A monitor block as defined below.
    Name string
    Specifies the name for this HDInsight Spark Cluster. Changing this forces a new resource to be created.
    Network SparkClusterNetwork
    A network block as defined below.
    SecurityProfile SparkClusterSecurityProfile
    A security_profile block as defined below. Changing this forces a new resource to be created.
    StorageAccountGen2 SparkClusterStorageAccountGen2
    A storage_account_gen2 block as defined below.
    StorageAccounts List<SparkClusterStorageAccount>
    One or more storage_account blocks as defined below.
    Tags Dictionary<string, string>
    A map of Tags which should be assigned to this HDInsight Spark Cluster.
    TlsMinVersion string

    The minimal supported TLS version. Possible values are 1.0, 1.1 or 1.2. Changing this forces a new resource to be created.

    NOTE: Starting on June 30, 2020, Azure HDInsight will enforce TLS 1.2 or later versions for all HTTPS connections. For more information, see Azure HDInsight TLS 1.2 Enforcement.

    ClusterVersion string
    Specifies the Version of HDInsight which should be used for this Cluster. Changing this forces a new resource to be created.
    ComponentVersion SparkClusterComponentVersionArgs
    A component_version block as defined below.
    Gateway SparkClusterGatewayArgs
    A gateway block as defined below.
    ResourceGroupName string
    Specifies the name of the Resource Group in which this HDInsight Spark Cluster should exist. Changing this forces a new resource to be created.
    Roles SparkClusterRolesArgs
    A roles block as defined below.
    Tier string
    Specifies the Tier which should be used for this HDInsight Spark Cluster. Possible values are Standard or Premium. Changing this forces a new resource to be created.
    ComputeIsolation SparkClusterComputeIsolationArgs
    A compute_isolation block as defined below.
    DiskEncryptions []SparkClusterDiskEncryptionArgs
    One or more disk_encryption blocks as defined below.
    EncryptionInTransitEnabled bool
    Whether encryption in transit is enabled for this Cluster. Changing this forces a new resource to be created.
    Extension SparkClusterExtensionArgs
    An extension block as defined below.
    Location string
    Specifies the Azure Region in which this HDInsight Spark Cluster should exist. Changing this forces a new resource to be created.
    Metastores SparkClusterMetastoresArgs
    A metastores block as defined below.
    Monitor SparkClusterMonitorArgs
    A monitor block as defined below.
    Name string
    Specifies the name for this HDInsight Spark Cluster. Changing this forces a new resource to be created.
    Network SparkClusterNetworkArgs
    A network block as defined below.
    SecurityProfile SparkClusterSecurityProfileArgs
    A security_profile block as defined below. Changing this forces a new resource to be created.
    StorageAccountGen2 SparkClusterStorageAccountGen2Args
    A storage_account_gen2 block as defined below.
    StorageAccounts []SparkClusterStorageAccountArgs
    One or more storage_account blocks as defined below.
    Tags map[string]string
    A map of Tags which should be assigned to this HDInsight Spark Cluster.
    TlsMinVersion string

    The minimal supported TLS version. Possible values are 1.0, 1.1 or 1.2. Changing this forces a new resource to be created.

    NOTE: Starting on June 30, 2020, Azure HDInsight will enforce TLS 1.2 or later versions for all HTTPS connections. For more information, see Azure HDInsight TLS 1.2 Enforcement.

    clusterVersion String
    Specifies the Version of HDInsight which should be used for this Cluster. Changing this forces a new resource to be created.
    componentVersion SparkClusterComponentVersion
    A component_version block as defined below.
    gateway SparkClusterGateway
    A gateway block as defined below.
    resourceGroupName String
    Specifies the name of the Resource Group in which this HDInsight Spark Cluster should exist. Changing this forces a new resource to be created.
    roles SparkClusterRoles
    A roles block as defined below.
    tier String
    Specifies the Tier which should be used for this HDInsight Spark Cluster. Possible values are Standard or Premium. Changing this forces a new resource to be created.
    computeIsolation SparkClusterComputeIsolation
    A compute_isolation block as defined below.
    diskEncryptions List<SparkClusterDiskEncryption>
    One or more disk_encryption blocks as defined below.
    encryptionInTransitEnabled Boolean
    Whether encryption in transit is enabled for this Cluster. Changing this forces a new resource to be created.
    extension SparkClusterExtension
    An extension block as defined below.
    location String
    Specifies the Azure Region in which this HDInsight Spark Cluster should exist. Changing this forces a new resource to be created.
    metastores SparkClusterMetastores
    A metastores block as defined below.
    monitor SparkClusterMonitor
    A monitor block as defined below.
    name String
    Specifies the name for this HDInsight Spark Cluster. Changing this forces a new resource to be created.
    network SparkClusterNetwork
    A network block as defined below.
    securityProfile SparkClusterSecurityProfile
    A security_profile block as defined below. Changing this forces a new resource to be created.
    storageAccountGen2 SparkClusterStorageAccountGen2
    A storage_account_gen2 block as defined below.
    storageAccounts List<SparkClusterStorageAccount>
    One or more storage_account blocks as defined below.
    tags Map<String,String>
    A map of Tags which should be assigned to this HDInsight Spark Cluster.
    tlsMinVersion String

    The minimal supported TLS version. Possible values are 1.0, 1.1 or 1.2. Changing this forces a new resource to be created.

    NOTE: Starting on June 30, 2020, Azure HDInsight will enforce TLS 1.2 or later versions for all HTTPS connections. For more information, see Azure HDInsight TLS 1.2 Enforcement.

    clusterVersion string
    Specifies the Version of HDInsight which should be used for this Cluster. Changing this forces a new resource to be created.
    componentVersion SparkClusterComponentVersion
    A component_version block as defined below.
    gateway SparkClusterGateway
    A gateway block as defined below.
    resourceGroupName string
    Specifies the name of the Resource Group in which this HDInsight Spark Cluster should exist. Changing this forces a new resource to be created.
    roles SparkClusterRoles
    A roles block as defined below.
    tier string
    Specifies the Tier which should be used for this HDInsight Spark Cluster. Possible values are Standard or Premium. Changing this forces a new resource to be created.
    computeIsolation SparkClusterComputeIsolation
    A compute_isolation block as defined below.
    diskEncryptions SparkClusterDiskEncryption[]
    One or more disk_encryption blocks as defined below.
    encryptionInTransitEnabled boolean
    Whether encryption in transit is enabled for this Cluster. Changing this forces a new resource to be created.
    extension SparkClusterExtension
    An extension block as defined below.
    location string
    Specifies the Azure Region in which this HDInsight Spark Cluster should exist. Changing this forces a new resource to be created.
    metastores SparkClusterMetastores
    A metastores block as defined below.
    monitor SparkClusterMonitor
    A monitor block as defined below.
    name string
    Specifies the name for this HDInsight Spark Cluster. Changing this forces a new resource to be created.
    network SparkClusterNetwork
    A network block as defined below.
    securityProfile SparkClusterSecurityProfile
    A security_profile block as defined below. Changing this forces a new resource to be created.
    storageAccountGen2 SparkClusterStorageAccountGen2
    A storage_account_gen2 block as defined below.
    storageAccounts SparkClusterStorageAccount[]
    One or more storage_account blocks as defined below.
    tags {[key: string]: string}
    A map of Tags which should be assigned to this HDInsight Spark Cluster.
    tlsMinVersion string

    The minimal supported TLS version. Possible values are 1.0, 1.1 or 1.2. Changing this forces a new resource to be created.

    NOTE: Starting on June 30, 2020, Azure HDInsight will enforce TLS 1.2 or later versions for all HTTPS connections. For more information, see Azure HDInsight TLS 1.2 Enforcement.

    cluster_version str
    Specifies the Version of HDInsight which should be used for this Cluster. Changing this forces a new resource to be created.
    component_version SparkClusterComponentVersionArgs
    A component_version block as defined below.
    gateway SparkClusterGatewayArgs
    A gateway block as defined below.
    resource_group_name str
    Specifies the name of the Resource Group in which this HDInsight Spark Cluster should exist. Changing this forces a new resource to be created.
    roles SparkClusterRolesArgs
    A roles block as defined below.
    tier str
    Specifies the Tier which should be used for this HDInsight Spark Cluster. Possible values are Standard or Premium. Changing this forces a new resource to be created.
    compute_isolation SparkClusterComputeIsolationArgs
    A compute_isolation block as defined below.
    disk_encryptions Sequence[SparkClusterDiskEncryptionArgs]
    One or more disk_encryption blocks as defined below.
    encryption_in_transit_enabled bool
    Whether encryption in transit is enabled for this Cluster. Changing this forces a new resource to be created.
    extension SparkClusterExtensionArgs
    An extension block as defined below.
    location str
    Specifies the Azure Region in which this HDInsight Spark Cluster should exist. Changing this forces a new resource to be created.
    metastores SparkClusterMetastoresArgs
    A metastores block as defined below.
    monitor SparkClusterMonitorArgs
    A monitor block as defined below.
    name str
    Specifies the name for this HDInsight Spark Cluster. Changing this forces a new resource to be created.
    network SparkClusterNetworkArgs
    A network block as defined below.
    security_profile SparkClusterSecurityProfileArgs
    A security_profile block as defined below. Changing this forces a new resource to be created.
    storage_account_gen2 SparkClusterStorageAccountGen2Args
    A storage_account_gen2 block as defined below.
    storage_accounts Sequence[SparkClusterStorageAccountArgs]
    One or more storage_account blocks as defined below.
    tags Mapping[str, str]
    A map of Tags which should be assigned to this HDInsight Spark Cluster.
    tls_min_version str

    The minimal supported TLS version. Possible values are 1.0, 1.1 or 1.2. Changing this forces a new resource to be created.

    NOTE: Starting on June 30, 2020, Azure HDInsight will enforce TLS 1.2 or later versions for all HTTPS connections. For more information, see Azure HDInsight TLS 1.2 Enforcement.

    clusterVersion String
    Specifies the Version of HDInsight which should be used for this Cluster. Changing this forces a new resource to be created.
    componentVersion Property Map
    A component_version block as defined below.
    gateway Property Map
    A gateway block as defined below.
    resourceGroupName String
    Specifies the name of the Resource Group in which this HDInsight Spark Cluster should exist. Changing this forces a new resource to be created.
    roles Property Map
    A roles block as defined below.
    tier String
    Specifies the Tier which should be used for this HDInsight Spark Cluster. Possible values are Standard or Premium. Changing this forces a new resource to be created.
    computeIsolation Property Map
    A compute_isolation block as defined below.
    diskEncryptions List<Property Map>
    One or more disk_encryption blocks as defined below.
    encryptionInTransitEnabled Boolean
    Whether encryption in transit is enabled for this Cluster. Changing this forces a new resource to be created.
    extension Property Map
    An extension block as defined below.
    location String
    Specifies the Azure Region in which this HDInsight Spark Cluster should exist. Changing this forces a new resource to be created.
    metastores Property Map
    A metastores block as defined below.
    monitor Property Map
    A monitor block as defined below.
    name String
    Specifies the name for this HDInsight Spark Cluster. Changing this forces a new resource to be created.
    network Property Map
    A network block as defined below.
    securityProfile Property Map
    A security_profile block as defined below. Changing this forces a new resource to be created.
    storageAccountGen2 Property Map
    A storage_account_gen2 block as defined below.
    storageAccounts List<Property Map>
    One or more storage_account blocks as defined below.
    tags Map<String>
    A map of Tags which should be assigned to this HDInsight Spark Cluster.
    tlsMinVersion String

    The minimal supported TLS version. Possible values are 1.0, 1.1 or 1.2. Changing this forces a new resource to be created.

    NOTE: Starting on June 30, 2020, Azure HDInsight will enforce TLS 1.2 or later versions for all HTTPS connections. For more information, see Azure HDInsight TLS 1.2 Enforcement.

    Outputs

    All input properties are implicitly available as output properties. Additionally, the SparkCluster resource produces the following output properties:

    HttpsEndpoint string
    The HTTPS Connectivity Endpoint for this HDInsight Spark Cluster.
    Id string
    The provider-assigned unique ID for this managed resource.
    SshEndpoint string
    The SSH Connectivity Endpoint for this HDInsight Spark Cluster.
    HttpsEndpoint string
    The HTTPS Connectivity Endpoint for this HDInsight Spark Cluster.
    Id string
    The provider-assigned unique ID for this managed resource.
    SshEndpoint string
    The SSH Connectivity Endpoint for this HDInsight Spark Cluster.
    httpsEndpoint String
    The HTTPS Connectivity Endpoint for this HDInsight Spark Cluster.
    id String
    The provider-assigned unique ID for this managed resource.
    sshEndpoint String
    The SSH Connectivity Endpoint for this HDInsight Spark Cluster.
    httpsEndpoint string
    The HTTPS Connectivity Endpoint for this HDInsight Spark Cluster.
    id string
    The provider-assigned unique ID for this managed resource.
    sshEndpoint string
    The SSH Connectivity Endpoint for this HDInsight Spark Cluster.
    https_endpoint str
    The HTTPS Connectivity Endpoint for this HDInsight Spark Cluster.
    id str
    The provider-assigned unique ID for this managed resource.
    ssh_endpoint str
    The SSH Connectivity Endpoint for this HDInsight Spark Cluster.
    httpsEndpoint String
    The HTTPS Connectivity Endpoint for this HDInsight Spark Cluster.
    id String
    The provider-assigned unique ID for this managed resource.
    sshEndpoint String
    The SSH Connectivity Endpoint for this HDInsight Spark Cluster.
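
    For example, a short TypeScript continuation of the Example Usage above that surfaces these endpoints as stack outputs (it assumes the `exampleSparkCluster` variable from that example):

    // Expose the connectivity endpoints via `pulumi stack output`.
    export const clusterHttpsEndpoint = exampleSparkCluster.httpsEndpoint;
    export const clusterSshEndpoint = exampleSparkCluster.sshEndpoint;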

    Look up Existing SparkCluster Resource

    Get an existing SparkCluster resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

    public static get(name: string, id: Input<ID>, state?: SparkClusterState, opts?: CustomResourceOptions): SparkCluster
    @staticmethod
    def get(resource_name: str,
            id: str,
            opts: Optional[ResourceOptions] = None,
            cluster_version: Optional[str] = None,
            component_version: Optional[SparkClusterComponentVersionArgs] = None,
            compute_isolation: Optional[SparkClusterComputeIsolationArgs] = None,
            disk_encryptions: Optional[Sequence[SparkClusterDiskEncryptionArgs]] = None,
            encryption_in_transit_enabled: Optional[bool] = None,
            extension: Optional[SparkClusterExtensionArgs] = None,
            gateway: Optional[SparkClusterGatewayArgs] = None,
            https_endpoint: Optional[str] = None,
            location: Optional[str] = None,
            metastores: Optional[SparkClusterMetastoresArgs] = None,
            monitor: Optional[SparkClusterMonitorArgs] = None,
            name: Optional[str] = None,
            network: Optional[SparkClusterNetworkArgs] = None,
            resource_group_name: Optional[str] = None,
            roles: Optional[SparkClusterRolesArgs] = None,
            security_profile: Optional[SparkClusterSecurityProfileArgs] = None,
            ssh_endpoint: Optional[str] = None,
            storage_account_gen2: Optional[SparkClusterStorageAccountGen2Args] = None,
            storage_accounts: Optional[Sequence[SparkClusterStorageAccountArgs]] = None,
            tags: Optional[Mapping[str, str]] = None,
            tier: Optional[str] = None,
            tls_min_version: Optional[str] = None) -> SparkCluster
    func GetSparkCluster(ctx *Context, name string, id IDInput, state *SparkClusterState, opts ...ResourceOption) (*SparkCluster, error)
    public static SparkCluster Get(string name, Input<string> id, SparkClusterState? state, CustomResourceOptions? opts = null)
    public static SparkCluster get(String name, Output<String> id, SparkClusterState state, CustomResourceOptions options)
    Resource lookup is not supported in YAML
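
    For example, a minimal TypeScript sketch of adopting an existing cluster by ID; the resource ID shown is a placeholder:

    import * as azure from "@pulumi/azure";
    
    // Look up an existing HDInsight Spark Cluster without creating it.
    // Replace the placeholder ID with the real Azure resource ID.
    const existing = azure.hdinsight.SparkCluster.get("existing",
        "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/example-resources/providers/Microsoft.HDInsight/clusters/example-hdicluster");
    
    export const existingHttpsEndpoint = existing.httpsEndpoint;
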
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    resource_name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    The following state arguments are supported:
    ClusterVersion string
    Specifies the Version of HDInsight which should be used for this Cluster. Changing this forces a new resource to be created.
    ComponentVersion SparkClusterComponentVersion
    A component_version block as defined below.
    ComputeIsolation SparkClusterComputeIsolation
    A compute_isolation block as defined below.
    DiskEncryptions List<SparkClusterDiskEncryption>
    One or more disk_encryption blocks as defined below.
    EncryptionInTransitEnabled bool
    Whether encryption in transit is enabled for this Cluster. Changing this forces a new resource to be created.
    Extension SparkClusterExtension
    An extension block as defined below.
    Gateway SparkClusterGateway
    A gateway block as defined below.
    HttpsEndpoint string
    The HTTPS Connectivity Endpoint for this HDInsight Spark Cluster.
    Location string
    Specifies the Azure Region in which this HDInsight Spark Cluster should exist. Changing this forces a new resource to be created.
    Metastores SparkClusterMetastores
    A metastores block as defined below.
    Monitor SparkClusterMonitor
    A monitor block as defined below.
    Name string
    Specifies the name for this HDInsight Spark Cluster. Changing this forces a new resource to be created.
    Network SparkClusterNetwork
    A network block as defined below.
    ResourceGroupName string
    Specifies the name of the Resource Group in which this HDInsight Spark Cluster should exist. Changing this forces a new resource to be created.
    Roles SparkClusterRoles
    A roles block as defined below.
    SecurityProfile SparkClusterSecurityProfile
    A security_profile block as defined below. Changing this forces a new resource to be created.
    SshEndpoint string
    The SSH Connectivity Endpoint for this HDInsight Spark Cluster.
    StorageAccountGen2 SparkClusterStorageAccountGen2
    A storage_account_gen2 block as defined below.
    StorageAccounts List<SparkClusterStorageAccount>
    One or more storage_account blocks as defined below.
    Tags Dictionary<string, string>
    A map of Tags which should be assigned to this HDInsight Spark Cluster.
    Tier string
    Specifies the Tier which should be used for this HDInsight Spark Cluster. Possible values are Standard or Premium. Changing this forces a new resource to be created.
    TlsMinVersion string

    The minimal supported TLS version. Possible values are 1.0, 1.1 or 1.2. Changing this forces a new resource to be created.

    NOTE: Starting on June 30, 2020, Azure HDInsight will enforce TLS 1.2 or later versions for all HTTPS connections. For more information, see Azure HDInsight TLS 1.2 Enforcement.

    ClusterVersion string
    Specifies the Version of HDInsight which should be used for this Cluster. Changing this forces a new resource to be created.
    ComponentVersion SparkClusterComponentVersionArgs
    A component_version block as defined below.
    ComputeIsolation SparkClusterComputeIsolationArgs
    A compute_isolation block as defined below.
    DiskEncryptions []SparkClusterDiskEncryptionArgs
    One or more disk_encryption blocks as defined below.
    EncryptionInTransitEnabled bool
    Whether encryption in transit is enabled for this Cluster. Changing this forces a new resource to be created.
    Extension SparkClusterExtensionArgs
    An extension block as defined below.
    Gateway SparkClusterGatewayArgs
    A gateway block as defined below.
    HttpsEndpoint string
    The HTTPS Connectivity Endpoint for this HDInsight Spark Cluster.
    Location string
    Specifies the Azure Region in which this HDInsight Spark Cluster should exist. Changing this forces a new resource to be created.
    Metastores SparkClusterMetastoresArgs
    A metastores block as defined below.
    Monitor SparkClusterMonitorArgs
    A monitor block as defined below.
    Name string
    Specifies the name for this HDInsight Spark Cluster. Changing this forces a new resource to be created.
    Network SparkClusterNetworkArgs
    A network block as defined below.
    ResourceGroupName string
    Specifies the name of the Resource Group in which this HDInsight Spark Cluster should exist. Changing this forces a new resource to be created.
    Roles SparkClusterRolesArgs
    A roles block as defined below.
    SecurityProfile SparkClusterSecurityProfileArgs
    A security_profile block as defined below. Changing this forces a new resource to be created.
    SshEndpoint string
    The SSH Connectivity Endpoint for this HDInsight Spark Cluster.
    StorageAccountGen2 SparkClusterStorageAccountGen2Args
    A storage_account_gen2 block as defined below.
    StorageAccounts []SparkClusterStorageAccountArgs
    One or more storage_account blocks as defined below.
    Tags map[string]string
    A map of Tags which should be assigned to this HDInsight Spark Cluster.
    Tier string
    Specifies the Tier which should be used for this HDInsight Spark Cluster. Possible values are Standard or Premium. Changing this forces a new resource to be created.
    TlsMinVersion string

    The minimal supported TLS version. Possible values are 1.0, 1.1 or 1.2. Changing this forces a new resource to be created.

    NOTE: Starting on June 30, 2020, Azure HDInsight will enforce TLS 1.2 or later versions for all HTTPS connections. For more information, see Azure HDInsight TLS 1.2 Enforcement.

    clusterVersion String
    Specifies the Version of HDInsight which should be used for this Cluster. Changing this forces a new resource to be created.
    componentVersion SparkClusterComponentVersion
    A component_version block as defined below.
    computeIsolation SparkClusterComputeIsolation
    A compute_isolation block as defined below.
    diskEncryptions List<SparkClusterDiskEncryption>
    One or more disk_encryption blocks as defined below.
    encryptionInTransitEnabled Boolean
    Whether encryption in transit is enabled for this Cluster. Changing this forces a new resource to be created.
    extension SparkClusterExtension
    An extension block as defined below.
    gateway SparkClusterGateway
    A gateway block as defined below.
    httpsEndpoint String
    The HTTPS Connectivity Endpoint for this HDInsight Spark Cluster.
    location String
    Specifies the Azure Region in which this HDInsight Spark Cluster should exist. Changing this forces a new resource to be created.
    metastores SparkClusterMetastores
    A metastores block as defined below.
    monitor SparkClusterMonitor
    A monitor block as defined below.
    name String
    Specifies the name for this HDInsight Spark Cluster. Changing this forces a new resource to be created.
    network SparkClusterNetwork
    A network block as defined below.
    resourceGroupName String
    Specifies the name of the Resource Group in which this HDInsight Spark Cluster should exist. Changing this forces a new resource to be created.
    roles SparkClusterRoles
    A roles block as defined below.
    securityProfile SparkClusterSecurityProfile
    A security_profile block as defined below. Changing this forces a new resource to be created.
    sshEndpoint String
    The SSH Connectivity Endpoint for this HDInsight Spark Cluster.
    storageAccountGen2 SparkClusterStorageAccountGen2
    A storage_account_gen2 block as defined below.
    storageAccounts List<SparkClusterStorageAccount>
    One or more storage_account blocks as defined below.
    tags Map<String,String>
    A map of Tags which should be assigned to this HDInsight Spark Cluster.
    tier String
    Specifies the Tier which should be used for this HDInsight Spark Cluster. Possible values are Standard or Premium. Changing this forces a new resource to be created.
    tlsMinVersion String

    The minimal supported TLS version. Possible values are 1.0, 1.1 or 1.2. Changing this forces a new resource to be created.

    NOTE: Starting on June 30, 2020, Azure HDInsight will enforce TLS 1.2 or later versions for all HTTPS connections. For more information, see Azure HDInsight TLS 1.2 Enforcement.

    clusterVersion string
    Specifies the Version of HDInsight which should be used for this Cluster. Changing this forces a new resource to be created.
    componentVersion SparkClusterComponentVersion
    A component_version block as defined below.
    computeIsolation SparkClusterComputeIsolation
    A compute_isolation block as defined below.
    diskEncryptions SparkClusterDiskEncryption[]
    One or more disk_encryption blocks as defined below.
    encryptionInTransitEnabled boolean
    Whether encryption in transit is enabled for this Cluster. Changing this forces a new resource to be created.
    extension SparkClusterExtension
    An extension block as defined below.
    gateway SparkClusterGateway
    A gateway block as defined below.
    httpsEndpoint string
    The HTTPS Connectivity Endpoint for this HDInsight Spark Cluster.
    location string
    Specifies the Azure Region in which this HDInsight Spark Cluster should exist. Changing this forces a new resource to be created.
    metastores SparkClusterMetastores
    A metastores block as defined below.
    monitor SparkClusterMonitor
    A monitor block as defined below.
    name string
    Specifies the name for this HDInsight Spark Cluster. Changing this forces a new resource to be created.
    network SparkClusterNetwork
    A network block as defined below.
    resourceGroupName string
    Specifies the name of the Resource Group in which this HDInsight Spark Cluster should exist. Changing this forces a new resource to be created.
    roles SparkClusterRoles
    A roles block as defined below.
    securityProfile SparkClusterSecurityProfile
    A security_profile block as defined below. Changing this forces a new resource to be created.
    sshEndpoint string
    The SSH Connectivity Endpoint for this HDInsight Spark Cluster.
    storageAccountGen2 SparkClusterStorageAccountGen2
    A storage_account_gen2 block as defined below.
    storageAccounts SparkClusterStorageAccount[]
    One or more storage_account blocks as defined below.
    tags {[key: string]: string}
    A map of Tags which should be assigned to this HDInsight Spark Cluster.
    tier string
    Specifies the Tier which should be used for this HDInsight Spark Cluster. Possible values are Standard or Premium. Changing this forces a new resource to be created.
    tlsMinVersion string

    The minimal supported TLS version. Possible values are 1.0, 1.1 or 1.2. Changing this forces a new resource to be created.

    NOTE: Starting on June 30, 2020, Azure HDInsight will enforce TLS 1.2 or later versions for all HTTPS connections. For more information, see Azure HDInsight TLS 1.2 Enforcement.

    cluster_version str
    Specifies the Version of HDInsight which should be used for this Cluster. Changing this forces a new resource to be created.
    component_version SparkClusterComponentVersionArgs
    A component_version block as defined below.
    compute_isolation SparkClusterComputeIsolationArgs
    A compute_isolation block as defined below.
    disk_encryptions Sequence[SparkClusterDiskEncryptionArgs]
    One or more disk_encryption blocks as defined below.
    encryption_in_transit_enabled bool
    Whether encryption in transit is enabled for this Cluster. Changing this forces a new resource to be created.
    extension SparkClusterExtensionArgs
    An extension block as defined below.
    gateway SparkClusterGatewayArgs
    A gateway block as defined below.
    https_endpoint str
    The HTTPS Connectivity Endpoint for this HDInsight Spark Cluster.
    location str
    Specifies the Azure Region in which this HDInsight Spark Cluster should exist. Changing this forces a new resource to be created.
    metastores SparkClusterMetastoresArgs
    A metastores block as defined below.
    monitor SparkClusterMonitorArgs
    A monitor block as defined below.
    name str
    Specifies the name for this HDInsight Spark Cluster. Changing this forces a new resource to be created.
    network SparkClusterNetworkArgs
    A network block as defined below.
    resource_group_name str
    Specifies the name of the Resource Group in which this HDInsight Spark Cluster should exist. Changing this forces a new resource to be created.
    roles SparkClusterRolesArgs
    A roles block as defined below.
    security_profile SparkClusterSecurityProfileArgs
    A security_profile block as defined below. Changing this forces a new resource to be created.
    ssh_endpoint str
    The SSH Connectivity Endpoint for this HDInsight Spark Cluster.
    storage_account_gen2 SparkClusterStorageAccountGen2Args
    A storage_account_gen2 block as defined below.
    storage_accounts Sequence[SparkClusterStorageAccountArgs]
    One or more storage_account blocks as defined below.
    tags Mapping[str, str]
    A map of Tags which should be assigned to this HDInsight Spark Cluster.
    tier str
    Specifies the Tier which should be used for this HDInsight Spark Cluster. Possible values are Standard or Premium. Changing this forces a new resource to be created.
    tls_min_version str

    The minimal supported TLS version. Possible values are 1.0, 1.1 or 1.2. Changing this forces a new resource to be created.

    NOTE: Starting on June 30, 2020, Azure HDInsight will enforce TLS 1.2 or later versions for all HTTPS connections. For more information, see Azure HDInsight TLS 1.2 Enforcement.

    clusterVersion String
    Specifies the Version of HDInsight which should be used for this Cluster. Changing this forces a new resource to be created.
    componentVersion Property Map
    A component_version block as defined below.
    computeIsolation Property Map
    A compute_isolation block as defined below.
    diskEncryptions List<Property Map>
    One or more disk_encryption blocks as defined below.
    encryptionInTransitEnabled Boolean
    Whether encryption in transit is enabled for this Cluster. Changing this forces a new resource to be created.
    extension Property Map
    An extension block as defined below.
    gateway Property Map
    A gateway block as defined below.
    httpsEndpoint String
    The HTTPS Connectivity Endpoint for this HDInsight Spark Cluster.
    location String
    Specifies the Azure Region in which this HDInsight Spark Cluster should exist. Changing this forces a new resource to be created.
    metastores Property Map
    A metastores block as defined below.
    monitor Property Map
    A monitor block as defined below.
    name String
    Specifies the name for this HDInsight Spark Cluster. Changing this forces a new resource to be created.
    network Property Map
    A network block as defined below.
    resourceGroupName String
    Specifies the name of the Resource Group in which this HDInsight Spark Cluster should exist. Changing this forces a new resource to be created.
    roles Property Map
    A roles block as defined below.
    securityProfile Property Map
    A security_profile block as defined below. Changing this forces a new resource to be created.
    sshEndpoint String
    The SSH Connectivity Endpoint for this HDInsight Spark Cluster.
    storageAccountGen2 Property Map
    A storage_account_gen2 block as defined below.
    storageAccounts List<Property Map>
    One or more storage_account blocks as defined below.
    tags Map<String>
    A map of Tags which should be assigned to this HDInsight Spark Cluster.
    tier String
    Specifies the Tier which should be used for this HDInsight Spark Cluster. Possible values are Standard or Premium. Changing this forces a new resource to be created.
    tlsMinVersion String

    The minimal supported TLS version. Possible values are 1.0, 1.1 or 1.2. Changing this forces a new resource to be created.

    NOTE: Starting on June 30, 2020, Azure HDInsight will enforce TLS 1.2 or later versions for all HTTPS connections. For more information, see Azure HDInsight TLS 1.2 Enforcement.

    Supporting Types

    SparkClusterComponentVersion, SparkClusterComponentVersionArgs

    Spark string
    The version of Spark which should be used for this HDInsight Spark Cluster. Changing this forces a new resource to be created.
    Spark string
    The version of Spark which should be used for this HDInsight Spark Cluster. Changing this forces a new resource to be created.
    spark String
    The version of Spark which should be used for this HDInsight Spark Cluster. Changing this forces a new resource to be created.
    spark string
    The version of Spark which should be used for this HDInsight Spark Cluster. Changing this forces a new resource to be created.
    spark str
    The version of Spark which should be used for this HDInsight Spark Cluster. Changing this forces a new resource to be created.
    spark String
    The version of Spark which should be used for this HDInsight Spark Cluster. Changing this forces a new resource to be created.

    SparkClusterComputeIsolation, SparkClusterComputeIsolationArgs

    ComputeIsolationEnabled bool
    This field indicates whether compute isolation is enabled. Possible values are true or false.
    HostSku string
    The name of the host SKU.
    ComputeIsolationEnabled bool
    This field indicates whether compute isolation is enabled. Possible values are true or false.
    HostSku string
    The name of the host SKU.
    computeIsolationEnabled Boolean
    This field indicates whether compute isolation is enabled. Possible values are true or false.
    hostSku String
    The name of the host SKU.
    computeIsolationEnabled boolean
    This field indicates whether compute isolation is enabled. Possible values are true or false.
    hostSku string
    The name of the host SKU.
    compute_isolation_enabled bool
    This field indicates whether compute isolation is enabled. Possible values are true or false.
    host_sku str
    The name of the host SKU.
    computeIsolationEnabled Boolean
    This field indicates whether compute isolation is enabled. Possible values are true or false.
    hostSku String
    The name of the host SKU.
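
    A minimal TypeScript sketch of the block, to be nested in the cluster arguments; the host SKU shown is a placeholder and must be a size that supports host-level isolation:

    computeIsolation: {
        computeIsolationEnabled: true,
        hostSku: "Standard_E8s_v3", // placeholder SKU, not verified against the isolation-capable sizes
    },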

    SparkClusterDiskEncryption, SparkClusterDiskEncryptionArgs

    EncryptionAlgorithm string
    This is an algorithm identifier for encryption. Possible values are RSA1_5, RSA-OAEP, RSA-OAEP-256.
    EncryptionAtHostEnabled bool
    This indicates whether resource disk encryption is enabled.
    KeyVaultKeyId string
    The ID of the key vault key.
    KeyVaultManagedIdentityId string
    This is the resource ID of Managed Identity used to access the key vault.
    EncryptionAlgorithm string
    This is an algorithm identifier for encryption. Possible values are RSA1_5, RSA-OAEP, RSA-OAEP-256.
    EncryptionAtHostEnabled bool
    This indicates whether resource disk encryption is enabled.
    KeyVaultKeyId string
    The ID of the key vault key.
    KeyVaultManagedIdentityId string
    This is the resource ID of Managed Identity used to access the key vault.
    encryptionAlgorithm String
    This is an algorithm identifier for encryption. Possible values are RSA1_5, RSA-OAEP, RSA-OAEP-256.
    encryptionAtHostEnabled Boolean
    This indicates whether resource disk encryption is enabled.
    keyVaultKeyId String
    The ID of the key vault key.
    keyVaultManagedIdentityId String
    This is the resource ID of Managed Identity used to access the key vault.
    encryptionAlgorithm string
    This is an algorithm identifier for encryption. Possible values are RSA1_5, RSA-OAEP, RSA-OAEP-256.
    encryptionAtHostEnabled boolean
    This indicates whether resource disk encryption is enabled.
    keyVaultKeyId string
    The ID of the key vault key.
    keyVaultManagedIdentityId string
    This is the resource ID of Managed Identity used to access the key vault.
    encryption_algorithm str
    This is an algorithm identifier for encryption. Possible values are RSA1_5, RSA-OAEP, RSA-OAEP-256.
    encryption_at_host_enabled bool
    This indicates whether resource disk encryption is enabled.
    key_vault_key_id str
    The ID of the key vault key.
    key_vault_managed_identity_id str
    This is the resource ID of Managed Identity used to access the key vault.
    encryptionAlgorithm String
    This is an algorithm identifier for encryption. Possible values are RSA1_5, RSA-OAEP, RSA-OAEP-256.
    encryptionAtHostEnabled Boolean
    This indicates whether resource disk encryption is enabled.
    keyVaultKeyId String
    The ID of the key vault key.
    keyVaultManagedIdentityId String
    This is the resource ID of Managed Identity used to access the key vault.
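
    A hedged sketch of a disk_encryption block using a customer-managed key; exampleKey and exampleIdentity are assumed resources (a Key Vault key and a user-assigned managed identity) defined elsewhere in the program:

    diskEncryptions: [{
        encryptionAlgorithm: "RSA-OAEP",
        encryptionAtHostEnabled: true,
        keyVaultKeyId: exampleKey.id,                  // assumed azure.keyvault.Key
        keyVaultManagedIdentityId: exampleIdentity.id, // assumed azure.authorization.UserAssignedIdentity
    }],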

    SparkClusterExtension, SparkClusterExtensionArgs

    LogAnalyticsWorkspaceId string
    The workspace ID of the log analytics extension.
    PrimaryKey string
    The workspace key of the log analytics extension.
    LogAnalyticsWorkspaceId string
    The workspace ID of the log analytics extension.
    PrimaryKey string
    The workspace key of the log analytics extension.
    logAnalyticsWorkspaceId String
    The workspace ID of the log analytics extension.
    primaryKey String
    The workspace key of the log analytics extension.
    logAnalyticsWorkspaceId string
    The workspace ID of the log analytics extension.
    primaryKey string
    The workspace key of the log analytics extension.
    log_analytics_workspace_id str
    The workspace ID of the log analytics extension.
    primary_key str
    The workspace key of the log analytics extension.
    logAnalyticsWorkspaceId String
    The workspace ID of the log analytics extension.
    primaryKey String
    The workspace key of the log analytics extension.
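
    A sketch of the extension block wired to a Log Analytics workspace; exampleWorkspace is an assumed azure.operationalinsights.AnalyticsWorkspace defined elsewhere:

    extension: {
        logAnalyticsWorkspaceId: exampleWorkspace.workspaceId, // workspace ID as exposed by the workspace resource
        primaryKey: exampleWorkspace.primarySharedKey,
    },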

    SparkClusterGateway, SparkClusterGatewayArgs

    Password string

    The password used for the Ambari Portal.

    NOTE: This password must be different from the one used for the head_node, worker_node and zookeeper_node roles.

    Username string
    The username used for the Ambari Portal. Changing this forces a new resource to be created.
    Password string

    The password used for the Ambari Portal.

    NOTE: This password must be different from the one used for the head_node, worker_node and zookeeper_node roles.

    Username string
    The username used for the Ambari Portal. Changing this forces a new resource to be created.
    password String

    The password used for the Ambari Portal.

    NOTE: This password must be different from the one used for the head_node, worker_node and zookeeper_node roles.

    username String
    The username used for the Ambari Portal. Changing this forces a new resource to be created.
    password string

    The password used for the Ambari Portal.

    NOTE: This password must be different from the one used for the head_node, worker_node and zookeeper_node roles.

    username string
    The username used for the Ambari Portal. Changing this forces a new resource to be created.
    password str

    The password used for the Ambari Portal.

    NOTE: This password must be different from the one used for the head_node, worker_node and zookeeper_node roles.

    username str
    The username used for the Ambari Portal. Changing this forces a new resource to be created.
    password String

    The password used for the Ambari Portal.

    NOTE: This password must be different from the one used for the head_node, worker_node and zookeeper_node roles.

    username String
    The username used for the Ambari Portal. Changing this forces a new resource to be created.

    SparkClusterMetastores, SparkClusterMetastoresArgs

    Ambari SparkClusterMetastoresAmbari
    An ambari block as defined below.
    Hive SparkClusterMetastoresHive
    A hive block as defined below.
    Oozie SparkClusterMetastoresOozie
    An oozie block as defined below.
    Ambari SparkClusterMetastoresAmbari
    An ambari block as defined below.
    Hive SparkClusterMetastoresHive
    A hive block as defined below.
    Oozie SparkClusterMetastoresOozie
    An oozie block as defined below.
    ambari SparkClusterMetastoresAmbari
    An ambari block as defined below.
    hive SparkClusterMetastoresHive
    A hive block as defined below.
    oozie SparkClusterMetastoresOozie
    An oozie block as defined below.
    ambari SparkClusterMetastoresAmbari
    An ambari block as defined below.
    hive SparkClusterMetastoresHive
    A hive block as defined below.
    oozie SparkClusterMetastoresOozie
    An oozie block as defined below.
    ambari SparkClusterMetastoresAmbari
    An ambari block as defined below.
    hive SparkClusterMetastoresHive
    A hive block as defined below.
    oozie SparkClusterMetastoresOozie
    An oozie block as defined below.
    ambari Property Map
    An ambari block as defined below.
    hive Property Map
    A hive block as defined below.
    oozie Property Map
    An oozie block as defined below.

    SparkClusterMetastoresAmbari, SparkClusterMetastoresAmbariArgs

    DatabaseName string
    The external Ambari metastore's existing SQL database. Changing this forces a new resource to be created.
    Password string
    The external Ambari metastore's existing SQL server admin password. Changing this forces a new resource to be created.
    Server string
    The fully-qualified domain name (FQDN) of the SQL server to use for the external Ambari metastore. Changing this forces a new resource to be created.
    Username string
    The external Ambari metastore's existing SQL server admin username. Changing this forces a new resource to be created.
    DatabaseName string
    The external Ambari metastore's existing SQL database. Changing this forces a new resource to be created.
    Password string
    The external Ambari metastore's existing SQL server admin password. Changing this forces a new resource to be created.
    Server string
    The fully-qualified domain name (FQDN) of the SQL server to use for the external Ambari metastore. Changing this forces a new resource to be created.
    Username string
    The external Ambari metastore's existing SQL server admin username. Changing this forces a new resource to be created.
    databaseName String
    The external Ambari metastore's existing SQL database. Changing this forces a new resource to be created.
    password String
    The external Ambari metastore's existing SQL server admin password. Changing this forces a new resource to be created.
    server String
    The fully-qualified domain name (FQDN) of the SQL server to use for the external Ambari metastore. Changing this forces a new resource to be created.
    username String
    The external Ambari metastore's existing SQL server admin username. Changing this forces a new resource to be created.
    databaseName string
    The external Ambari metastore's existing SQL database. Changing this forces a new resource to be created.
    password string
    The external Ambari metastore's existing SQL server admin password. Changing this forces a new resource to be created.
    server string
    The fully-qualified domain name (FQDN) of the SQL server to use for the external Ambari metastore. Changing this forces a new resource to be created.
    username string
    The external Ambari metastore's existing SQL server admin username. Changing this forces a new resource to be created.
    database_name str
    The external Ambari metastore's existing SQL database. Changing this forces a new resource to be created.
    password str
    The external Ambari metastore's existing SQL server admin password. Changing this forces a new resource to be created.
    server str
    The fully-qualified domain name (FQDN) of the SQL server to use for the external Ambari metastore. Changing this forces a new resource to be created.
    username str
    The external Ambari metastore's existing SQL server admin username. Changing this forces a new resource to be created.
    databaseName String
    The external Ambari metastore's existing SQL database. Changing this forces a new resource to be created.
    password String
    The external Ambari metastore's existing SQL server admin password. Changing this forces a new resource to be created.
    server String
    The fully-qualified domain name (FQDN) of the SQL server to use for the external Ambari metastore. Changing this forces a new resource to be created.
    username String
    The external Ambari metastore's existing SQL server admin username. Changing this forces a new resource to be created.

    SparkClusterMetastoresHive, SparkClusterMetastoresHiveArgs

    DatabaseName string
    The external Hive metastore's existing SQL database. Changing this forces a new resource to be created.
    Password string
    The external Hive metastore's existing SQL server admin password. Changing this forces a new resource to be created.
    Server string
    The fully-qualified domain name (FQDN) of the SQL server to use for the external Hive metastore. Changing this forces a new resource to be created.
    Username string
    The external Hive metastore's existing SQL server admin username. Changing this forces a new resource to be created.
    DatabaseName string
    The external Hive metastore's existing SQL database. Changing this forces a new resource to be created.
    Password string
    The external Hive metastore's existing SQL server admin password. Changing this forces a new resource to be created.
    Server string
    The fully-qualified domain name (FQDN) of the SQL server to use for the external Hive metastore. Changing this forces a new resource to be created.
    Username string
    The external Hive metastore's existing SQL server admin username. Changing this forces a new resource to be created.
    databaseName String
    The external Hive metastore's existing SQL database. Changing this forces a new resource to be created.
    password String
    The external Hive metastore's existing SQL server admin password. Changing this forces a new resource to be created.
    server String
    The fully-qualified domain name (FQDN) of the SQL server to use for the external Hive metastore. Changing this forces a new resource to be created.
    username String
    The external Hive metastore's existing SQL server admin username. Changing this forces a new resource to be created.
    databaseName string
    The external Hive metastore's existing SQL database. Changing this forces a new resource to be created.
    password string
    The external Hive metastore's existing SQL server admin password. Changing this forces a new resource to be created.
    server string
    The fully-qualified domain name (FQDN) of the SQL server to use for the external Hive metastore. Changing this forces a new resource to be created.
    username string
    The external Hive metastore's existing SQL server admin username. Changing this forces a new resource to be created.
    database_name str
    The external Hive metastore's existing SQL database. Changing this forces a new resource to be created.
    password str
    The external Hive metastore's existing SQL server admin password. Changing this forces a new resource to be created.
    server str
    The fully-qualified domain name (FQDN) of the SQL server to use for the external Hive metastore. Changing this forces a new resource to be created.
    username str
    The external Hive metastore's existing SQL server admin username. Changing this forces a new resource to be created.
    databaseName String
    The external Hive metastore's existing SQL database. Changing this forces a new resource to be created.
    password String
    The external Hive metastore's existing SQL server admin password. Changing this forces a new resource to be created.
    server String
    The fully-qualified domain name (FQDN) of the SQL server to use for the external Hive metastore. Changing this forces a new resource to be created.
    username String
    The external Hive metastore's existing SQL server admin username. Changing this forces a new resource to be created.

    SparkClusterMetastoresOozie, SparkClusterMetastoresOozieArgs

    DatabaseName string
    The external Oozie metastore's existing SQL database. Changing this forces a new resource to be created.
    Password string
    The external Oozie metastore's existing SQL server admin password. Changing this forces a new resource to be created.
    Server string
    The fully-qualified domain name (FQDN) of the SQL server to use for the external Oozie metastore. Changing this forces a new resource to be created.
    Username string
    The external Oozie metastore's existing SQL server admin username. Changing this forces a new resource to be created.
    DatabaseName string
    The external Oozie metastore's existing SQL database. Changing this forces a new resource to be created.
    Password string
    The external Oozie metastore's existing SQL server admin password. Changing this forces a new resource to be created.
    Server string
    The fully-qualified domain name (FQDN) of the SQL server to use for the external Oozie metastore. Changing this forces a new resource to be created.
    Username string
    The external Oozie metastore's existing SQL server admin username. Changing this forces a new resource to be created.
    databaseName String
    The external Oozie metastore's existing SQL database. Changing this forces a new resource to be created.
    password String
    The external Oozie metastore's existing SQL server admin password. Changing this forces a new resource to be created.
    server String
    The fully-qualified domain name (FQDN) of the SQL server to use for the external Oozie metastore. Changing this forces a new resource to be created.
    username String
    The external Oozie metastore's existing SQL server admin username. Changing this forces a new resource to be created.
    databaseName string
    The external Oozie metastore's existing SQL database. Changing this forces a new resource to be created.
    password string
    The external Oozie metastore's existing SQL server admin password. Changing this forces a new resource to be created.
    server string
    The fully-qualified domain name (FQDN) of the SQL server to use for the external Oozie metastore. Changing this forces a new resource to be created.
    username string
    The external Oozie metastore's existing SQL server admin username. Changing this forces a new resource to be created.
    database_name str
    The external Oozie metastore's existing SQL database. Changing this forces a new resource to be created.
    password str
    The external Oozie metastore's existing SQL server admin password. Changing this forces a new resource to be created.
    server str
    The fully-qualified domain name (FQDN) of the SQL server to use for the external Oozie metastore. Changing this forces a new resource to be created.
    username str
    The external Oozie metastore's existing SQL server admin username. Changing this forces a new resource to be created.
    databaseName String
    The external Oozie metastore's existing SQL database. Changing this forces a new resource to be created.
    password String
    The external Oozie metastore's existing SQL server admin password. Changing this forces a new resource to be created.
    server String
    The fully-qualified domain name (FQDN) of the SQL server to use for the external Oozie metastore. Changing this forces a new resource to be created.
    username String
    The external Oozie metastore's existing SQL server admin username. Changing this forces a new resource to be created.
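
    Putting the three blocks together, a hedged TypeScript sketch of an external Hive metastore backed by an existing Azure SQL server; the server, database and credentials are placeholders, and in practice the password should come from configuration or a secret store:

    metastores: {
        hive: {
            server: "example-sqlserver.database.windows.net", // placeholder FQDN of an existing SQL server
            databaseName: "hivemetastore",                     // placeholder existing database
            username: "sqladmin",                              // placeholder admin username
            password: "Placeholder1234!",                      // placeholder admin password
        },
        // ambari: { ... } and oozie: { ... } take the same four arguments
    },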

    SparkClusterMonitor, SparkClusterMonitorArgs

    LogAnalyticsWorkspaceId string
    The Operations Management Suite (OMS) workspace ID.
    PrimaryKey string
    The Operations Management Suite (OMS) workspace key.
    LogAnalyticsWorkspaceId string
    The Operations Management Suite (OMS) workspace ID.
    PrimaryKey string
    The Operations Management Suite (OMS) workspace key.
    logAnalyticsWorkspaceId String
    The Operations Management Suite (OMS) workspace ID.
    primaryKey String
    The Operations Management Suite (OMS) workspace key.
    logAnalyticsWorkspaceId string
    The Operations Management Suite (OMS) workspace ID.
    primaryKey string
    The Operations Management Suite (OMS) workspace key.
    log_analytics_workspace_id str
    The Operations Management Suite (OMS) workspace ID.
    primary_key str
    The Operations Management Suite (OMS) workspace key.
    logAnalyticsWorkspaceId String
    The Operations Management Suite (OMS) workspace ID.
    primaryKey String
    The Operations Management Suite (OMS) workspace key.
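
    A sketch of the monitor block, reusing the assumed Log Analytics workspace from the extension example above:

    monitor: {
        logAnalyticsWorkspaceId: exampleWorkspace.workspaceId,
        primaryKey: exampleWorkspace.primarySharedKey,
    },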

    SparkClusterNetwork, SparkClusterNetworkArgs

    ConnectionDirection string

    The direction of the resource provider connection. Possible values include Inbound or Outbound. Defaults to Inbound. Changing this forces a new resource to be created.

    NOTE: To enable private link, the connection_direction must be set to Outbound.

    PrivateLinkEnabled bool
    Is the private link enabled? Possible values include true or false. Defaults to false. Changing this forces a new resource to be created.
    ConnectionDirection string

    The direction of the resource provider connection. Possible values include Inbound or Outbound. Defaults to Inbound. Changing this forces a new resource to be created.

    NOTE: To enable private link, the connection_direction must be set to Outbound.

    PrivateLinkEnabled bool
    Is the private link enabled? Possible values include true or false. Defaults to false. Changing this forces a new resource to be created.
    connectionDirection String

    The direction of the resource provider connection. Possible values include Inbound or Outbound. Defaults to Inbound. Changing this forces a new resource to be created.

    NOTE: To enable private link, the connection_direction must be set to Outbound.

    privateLinkEnabled Boolean
    Is the private link enabled? Possible values include true or false. Defaults to false. Changing this forces a new resource to be created.
    connectionDirection string

    The direction of the resource provider connection. Possible values include Inbound or Outbound. Defaults to Inbound. Changing this forces a new resource to be created.

    NOTE: To enable private link, the connection_direction must be set to Outbound.

    privateLinkEnabled boolean
    Is the private link enabled? Possible values include true or false. Defaults to false. Changing this forces a new resource to be created.
    connection_direction str

    The direction of the resource provider connection. Possible values include Inbound or Outbound. Defaults to Inbound. Changing this forces a new resource to be created.

    NOTE: To enable private link, the connection_direction must be set to Outbound.

    private_link_enabled bool
    Is the private link enabled? Possible values include true or false. Defaults to false. Changing this forces a new resource to be created.
    connectionDirection String

    The direction of the resource provider connection. Possible values include Inbound or Outbound. Defaults to Inbound. Changing this forces a new resource to be created.

    NOTE: To enable private link, the connection_direction must be set to Outbound.

    privateLinkEnabled Boolean
    Is the private link enabled? Possible values include true or false. Defaults to false. Changing this forces a new resource to be created.
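
    For a cluster reached over Private Link, a hedged sketch of the network block; per the note above, the connection direction must be Outbound when private link is enabled:

    network: {
        connectionDirection: "Outbound",
        privateLinkEnabled: true,
    },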

    SparkClusterRoles, SparkClusterRolesArgs

    HeadNode SparkClusterRolesHeadNode
    A head_node block as defined above.
    WorkerNode SparkClusterRolesWorkerNode
    A worker_node block as defined below.
    ZookeeperNode SparkClusterRolesZookeeperNode
    A zookeeper_node block as defined below.
    HeadNode SparkClusterRolesHeadNode
    A head_node block as defined above.
    WorkerNode SparkClusterRolesWorkerNode
    A worker_node block as defined below.
    ZookeeperNode SparkClusterRolesZookeeperNode
    A zookeeper_node block as defined below.
    headNode SparkClusterRolesHeadNode
    A head_node block as defined above.
    workerNode SparkClusterRolesWorkerNode
    A worker_node block as defined below.
    zookeeperNode SparkClusterRolesZookeeperNode
    A zookeeper_node block as defined below.
    headNode SparkClusterRolesHeadNode
    A head_node block as defined above.
    workerNode SparkClusterRolesWorkerNode
    A worker_node block as defined below.
    zookeeperNode SparkClusterRolesZookeeperNode
    A zookeeper_node block as defined below.
    head_node SparkClusterRolesHeadNode
    A head_node block as defined above.
    worker_node SparkClusterRolesWorkerNode
    A worker_node block as defined below.
    zookeeper_node SparkClusterRolesZookeeperNode
    A zookeeper_node block as defined below.
    headNode Property Map
    A head_node block as defined above.
    workerNode Property Map
    A worker_node block as defined below.
    zookeeperNode Property Map
    A zookeeper_node block as defined below.

    SparkClusterRolesHeadNode, SparkClusterRolesHeadNodeArgs

    Username string
    The Username of the local administrator for the Head Nodes. Changing this forces a new resource to be created.
    VmSize string
    The Size of the Virtual Machine which should be used as the Head Nodes. Possible values are ExtraSmall, Small, Medium, Large, ExtraLarge, A5, A6, A7, A8, A9, A10, A11, Standard_A1_V2, Standard_A2_V2, Standard_A2m_V2, Standard_A3, Standard_A4_V2, Standard_A4m_V2, Standard_A8_V2, Standard_A8m_V2, Standard_D1, Standard_D2, Standard_D3, Standard_D4, Standard_D11, Standard_D12, Standard_D13, Standard_D14, Standard_D1_V2, Standard_D2_V2, Standard_D3_V2, Standard_D4_V2, Standard_D5_V2, Standard_D11_V2, Standard_D12_V2, Standard_D13_V2, Standard_D14_V2, Standard_DS1_V2, Standard_DS2_V2, Standard_DS3_V2, Standard_DS4_V2, Standard_DS5_V2, Standard_DS11_V2, Standard_DS12_V2, Standard_DS13_V2, Standard_DS14_V2, Standard_E2_V3, Standard_E4_V3, Standard_E8_V3, Standard_E16_V3, Standard_E20_V3, Standard_E32_V3, Standard_E64_V3, Standard_E64i_V3, Standard_E2s_V3, Standard_E4s_V3, Standard_E8s_V3, Standard_E16s_V3, Standard_E20s_V3, Standard_E32s_V3, Standard_E64s_V3, Standard_E64is_V3, Standard_D2a_V4, Standard_D4a_V4, Standard_D8a_V4, Standard_D16a_V4, Standard_D32a_V4, Standard_D48a_V4, Standard_D64a_V4, Standard_D96a_V4, Standard_E2a_V4, Standard_E4a_V4, Standard_E8a_V4, Standard_E16a_V4, Standard_E20a_V4, Standard_E32a_V4, Standard_E48a_V4, Standard_E64a_V4, Standard_E96a_V4, Standard_G1, Standard_G2, Standard_G3, Standard_G4, Standard_G5, Standard_F2s_V2, Standard_F4s_V2, Standard_F8s_V2, Standard_F16s_V2, Standard_F32s_V2, Standard_F64s_V2, Standard_F72s_V2, Standard_GS1, Standard_GS2, Standard_GS3, Standard_GS4, Standard_GS5 and Standard_NC24. Changing this forces a new resource to be created.
    Password string

    The Password associated with the local administrator for the Head Nodes. Changing this forces a new resource to be created.

    NOTE: If specified, this password must be at least 10 characters in length and must contain at least one digit, one uppercase and one lower case letter, one non-alphanumeric character (except characters ' " ` ).

    ScriptActions List<SparkClusterRolesHeadNodeScriptAction>
    The script action which will run on the cluster. One or more script_actions blocks as defined below.
    SshKeys List<string>

    A list of SSH Keys which should be used for the local administrator on the Head Nodes. Changing this forces a new resource to be created.

    NOTE: Either a password or one or more ssh_keys must be specified - but not both.

    SubnetId string
    The ID of the Subnet within the Virtual Network where the Head Nodes should be provisioned within. Changing this forces a new resource to be created.
    VirtualNetworkId string
    The ID of the Virtual Network where the Head Nodes should be provisioned within. Changing this forces a new resource to be created.
    Username string
    The Username of the local administrator for the Head Nodes. Changing this forces a new resource to be created.
    VmSize string
    The Size of the Virtual Machine which should be used as the Head Nodes. Possible values are ExtraSmall, Small, Medium, Large, ExtraLarge, A5, A6, A7, A8, A9, A10, A11, Standard_A1_V2, Standard_A2_V2, Standard_A2m_V2, Standard_A3, Standard_A4_V2, Standard_A4m_V2, Standard_A8_V2, Standard_A8m_V2, Standard_D1, Standard_D2, Standard_D3, Standard_D4, Standard_D11, Standard_D12, Standard_D13, Standard_D14, Standard_D1_V2, Standard_D2_V2, Standard_D3_V2, Standard_D4_V2, Standard_D5_V2, Standard_D11_V2, Standard_D12_V2, Standard_D13_V2, Standard_D14_V2, Standard_DS1_V2, Standard_DS2_V2, Standard_DS3_V2, Standard_DS4_V2, Standard_DS5_V2, Standard_DS11_V2, Standard_DS12_V2, Standard_DS13_V2, Standard_DS14_V2, Standard_E2_V3, Standard_E4_V3, Standard_E8_V3, Standard_E16_V3, Standard_E20_V3, Standard_E32_V3, Standard_E64_V3, Standard_E64i_V3, Standard_E2s_V3, Standard_E4s_V3, Standard_E8s_V3, Standard_E16s_V3, Standard_E20s_V3, Standard_E32s_V3, Standard_E64s_V3, Standard_E64is_V3, Standard_D2a_V4, Standard_D4a_V4, Standard_D8a_V4, Standard_D16a_V4, Standard_D32a_V4, Standard_D48a_V4, Standard_D64a_V4, Standard_D96a_V4, Standard_E2a_V4, Standard_E4a_V4, Standard_E8a_V4, Standard_E16a_V4, Standard_E20a_V4, Standard_E32a_V4, Standard_E48a_V4, Standard_E64a_V4, Standard_E96a_V4, Standard_G1, Standard_G2, Standard_G3, Standard_G4, Standard_G5, Standard_F2s_V2, Standard_F4s_V2, Standard_F8s_V2, Standard_F16s_V2, Standard_F32s_V2, Standard_F64s_V2, Standard_F72s_V2, Standard_GS1, Standard_GS2, Standard_GS3, Standard_GS4, Standard_GS5 and Standard_NC24. Changing this forces a new resource to be created.
    Password string

    The Password associated with the local administrator for the Head Nodes. Changing this forces a new resource to be created.

    NOTE: If specified, this password must be at least 10 characters in length and must contain at least one digit, one uppercase and one lower case letter, one non-alphanumeric character (except characters ' " ` ).

    ScriptActions []SparkClusterRolesHeadNodeScriptAction
    The script action which will run on the cluster. One or more script_actions blocks as defined below.
    SshKeys []string

    A list of SSH Keys which should be used for the local administrator on the Head Nodes. Changing this forces a new resource to be created.

    NOTE: Either a password or one or more ssh_keys must be specified - but not both.

    SubnetId string
    The ID of the Subnet within the Virtual Network where the Head Nodes should be provisioned within. Changing this forces a new resource to be created.
    VirtualNetworkId string
    The ID of the Virtual Network where the Head Nodes should be provisioned within. Changing this forces a new resource to be created.
    username String
    The Username of the local administrator for the Head Nodes. Changing this forces a new resource to be created.
    vmSize String
    The Size of the Virtual Machine which should be used as the Head Nodes. Possible values are ExtraSmall, Small, Medium, Large, ExtraLarge, A5, A6, A7, A8, A9, A10, A11, Standard_A1_V2, Standard_A2_V2, Standard_A2m_V2, Standard_A3, Standard_A4_V2, Standard_A4m_V2, Standard_A8_V2, Standard_A8m_V2, Standard_D1, Standard_D2, Standard_D3, Standard_D4, Standard_D11, Standard_D12, Standard_D13, Standard_D14, Standard_D1_V2, Standard_D2_V2, Standard_D3_V2, Standard_D4_V2, Standard_D5_V2, Standard_D11_V2, Standard_D12_V2, Standard_D13_V2, Standard_D14_V2, Standard_DS1_V2, Standard_DS2_V2, Standard_DS3_V2, Standard_DS4_V2, Standard_DS5_V2, Standard_DS11_V2, Standard_DS12_V2, Standard_DS13_V2, Standard_DS14_V2, Standard_E2_V3, Standard_E4_V3, Standard_E8_V3, Standard_E16_V3, Standard_E20_V3, Standard_E32_V3, Standard_E64_V3, Standard_E64i_V3, Standard_E2s_V3, Standard_E4s_V3, Standard_E8s_V3, Standard_E16s_V3, Standard_E20s_V3, Standard_E32s_V3, Standard_E64s_V3, Standard_E64is_V3, Standard_D2a_V4, Standard_D4a_V4, Standard_D8a_V4, Standard_D16a_V4, Standard_D32a_V4, Standard_D48a_V4, Standard_D64a_V4, Standard_D96a_V4, Standard_E2a_V4, Standard_E4a_V4, Standard_E8a_V4, Standard_E16a_V4, Standard_E20a_V4, Standard_E32a_V4, Standard_E48a_V4, Standard_E64a_V4, Standard_E96a_V4, Standard_G1, Standard_G2, Standard_G3, Standard_G4, Standard_G5, Standard_F2s_V2, Standard_F4s_V2, Standard_F8s_V2, Standard_F16s_V2, Standard_F32s_V2, Standard_F64s_V2, Standard_F72s_V2, Standard_GS1, Standard_GS2, Standard_GS3, Standard_GS4, Standard_GS5 and Standard_NC24. Changing this forces a new resource to be created.
    password String

    The Password associated with the local administrator for the Head Nodes. Changing this forces a new resource to be created.

    NOTE: If specified, this password must be at least 10 characters in length and must contain at least one digit, one uppercase and one lower case letter, one non-alphanumeric character (except characters ' " ` ).

    scriptActions List<SparkClusterRolesHeadNodeScriptAction>
    The script action which will run on the cluster. One or more script_actions blocks as defined below.
    sshKeys List<String>

    A list of SSH Keys which should be used for the local administrator on the Head Nodes. Changing this forces a new resource to be created.

    NOTE: Either a password or one or more ssh_keys must be specified - but not both.

    subnetId String
    The ID of the Subnet within the Virtual Network where the Head Nodes should be provisioned within. Changing this forces a new resource to be created.
    virtualNetworkId String
    The ID of the Virtual Network where the Head Nodes should be provisioned within. Changing this forces a new resource to be created.
    username string
    The Username of the local administrator for the Head Nodes. Changing this forces a new resource to be created.
    vmSize string
    The Size of the Virtual Machine which should be used as the Head Nodes. Possible values are ExtraSmall, Small, Medium, Large, ExtraLarge, A5, A6, A7, A8, A9, A10, A11, Standard_A1_V2, Standard_A2_V2, Standard_A2m_V2, Standard_A3, Standard_A4_V2, Standard_A4m_V2, Standard_A8_V2, Standard_A8m_V2, Standard_D1, Standard_D2, Standard_D3, Standard_D4, Standard_D11, Standard_D12, Standard_D13, Standard_D14, Standard_D1_V2, Standard_D2_V2, Standard_D3_V2, Standard_D4_V2, Standard_D5_V2, Standard_D11_V2, Standard_D12_V2, Standard_D13_V2, Standard_D14_V2, Standard_DS1_V2, Standard_DS2_V2, Standard_DS3_V2, Standard_DS4_V2, Standard_DS5_V2, Standard_DS11_V2, Standard_DS12_V2, Standard_DS13_V2, Standard_DS14_V2, Standard_E2_V3, Standard_E4_V3, Standard_E8_V3, Standard_E16_V3, Standard_E20_V3, Standard_E32_V3, Standard_E64_V3, Standard_E64i_V3, Standard_E2s_V3, Standard_E4s_V3, Standard_E8s_V3, Standard_E16s_V3, Standard_E20s_V3, Standard_E32s_V3, Standard_E64s_V3, Standard_E64is_V3, Standard_D2a_V4, Standard_D4a_V4, Standard_D8a_V4, Standard_D16a_V4, Standard_D32a_V4, Standard_D48a_V4, Standard_D64a_V4, Standard_D96a_V4, Standard_E2a_V4, Standard_E4a_V4, Standard_E8a_V4, Standard_E16a_V4, Standard_E20a_V4, Standard_E32a_V4, Standard_E48a_V4, Standard_E64a_V4, Standard_E96a_V4, Standard_G1, Standard_G2, Standard_G3, Standard_G4, Standard_G5, Standard_F2s_V2, Standard_F4s_V2, Standard_F8s_V2, Standard_F16s_V2, Standard_F32s_V2, Standard_F64s_V2, Standard_F72s_V2, Standard_GS1, Standard_GS2, Standard_GS3, Standard_GS4, Standard_GS5 and Standard_NC24. Changing this forces a new resource to be created.
    password string

    The Password associated with the local administrator for the Head Nodes. Changing this forces a new resource to be created.

    NOTE: If specified, this password must be at least 10 characters in length and must contain at least one digit, one uppercase and one lower case letter, one non-alphanumeric character (except characters ' " ` ).

    scriptActions SparkClusterRolesHeadNodeScriptAction[]
    The script action which will run on the cluster. One or more script_actions blocks as defined below.
    sshKeys string[]

    A list of SSH Keys which should be used for the local administrator on the Head Nodes. Changing this forces a new resource to be created.

    NOTE: Either a password or one or more ssh_keys must be specified - but not both.

    subnetId string
    The ID of the Subnet within the Virtual Network where the Head Nodes should be provisioned within. Changing this forces a new resource to be created.
    virtualNetworkId string
    The ID of the Virtual Network where the Head Nodes should be provisioned within. Changing this forces a new resource to be created.
    username str
    The Username of the local administrator for the Head Nodes. Changing this forces a new resource to be created.
    vm_size str
    The Size of the Virtual Machine which should be used as the Head Nodes. Possible values are ExtraSmall, Small, Medium, Large, ExtraLarge, A5, A6, A7, A8, A9, A10, A11, Standard_A1_V2, Standard_A2_V2, Standard_A2m_V2, Standard_A3, Standard_A4_V2, Standard_A4m_V2, Standard_A8_V2, Standard_A8m_V2, Standard_D1, Standard_D2, Standard_D3, Standard_D4, Standard_D11, Standard_D12, Standard_D13, Standard_D14, Standard_D1_V2, Standard_D2_V2, Standard_D3_V2, Standard_D4_V2, Standard_D5_V2, Standard_D11_V2, Standard_D12_V2, Standard_D13_V2, Standard_D14_V2, Standard_DS1_V2, Standard_DS2_V2, Standard_DS3_V2, Standard_DS4_V2, Standard_DS5_V2, Standard_DS11_V2, Standard_DS12_V2, Standard_DS13_V2, Standard_DS14_V2, Standard_E2_V3, Standard_E4_V3, Standard_E8_V3, Standard_E16_V3, Standard_E20_V3, Standard_E32_V3, Standard_E64_V3, Standard_E64i_V3, Standard_E2s_V3, Standard_E4s_V3, Standard_E8s_V3, Standard_E16s_V3, Standard_E20s_V3, Standard_E32s_V3, Standard_E64s_V3, Standard_E64is_V3, Standard_D2a_V4, Standard_D4a_V4, Standard_D8a_V4, Standard_D16a_V4, Standard_D32a_V4, Standard_D48a_V4, Standard_D64a_V4, Standard_D96a_V4, Standard_E2a_V4, Standard_E4a_V4, Standard_E8a_V4, Standard_E16a_V4, Standard_E20a_V4, Standard_E32a_V4, Standard_E48a_V4, Standard_E64a_V4, Standard_E96a_V4, Standard_G1, Standard_G2, Standard_G3, Standard_G4, Standard_G5, Standard_F2s_V2, Standard_F4s_V2, Standard_F8s_V2, Standard_F16s_V2, Standard_F32s_V2, Standard_F64s_V2, Standard_F72s_V2, Standard_GS1, Standard_GS2, Standard_GS3, Standard_GS4, Standard_GS5 and Standard_NC24. Changing this forces a new resource to be created.
    password str

    The Password associated with the local administrator for the Head Nodes. Changing this forces a new resource to be created.

    NOTE: If specified, this password must be at least 10 characters in length and must contain at least one digit, one uppercase and one lower case letter, one non-alphanumeric character (except characters ' " ` ).

    script_actions Sequence[SparkClusterRolesHeadNodeScriptAction]
    The script action which will run on the cluster. One or more script_actions blocks as defined below.
    ssh_keys Sequence[str]

    A list of SSH Keys which should be used for the local administrator on the Head Nodes. Changing this forces a new resource to be created.

    NOTE: Either a password or one or more ssh_keys must be specified - but not both.

    subnet_id str
    The ID of the Subnet within the Virtual Network where the Head Nodes should be provisioned within. Changing this forces a new resource to be created.
    virtual_network_id str
    The ID of the Virtual Network where the Head Nodes should be provisioned within. Changing this forces a new resource to be created.
    username String
    The Username of the local administrator for the Head Nodes. Changing this forces a new resource to be created.
    vmSize String
    The Size of the Virtual Machine which should be used as the Head Nodes. Possible values are ExtraSmall, Small, Medium, Large, ExtraLarge, A5, A6, A7, A8, A9, A10, A11, Standard_A1_V2, Standard_A2_V2, Standard_A2m_V2, Standard_A3, Standard_A4_V2, Standard_A4m_V2, Standard_A8_V2, Standard_A8m_V2, Standard_D1, Standard_D2, Standard_D3, Standard_D4, Standard_D11, Standard_D12, Standard_D13, Standard_D14, Standard_D1_V2, Standard_D2_V2, Standard_D3_V2, Standard_D4_V2, Standard_D5_V2, Standard_D11_V2, Standard_D12_V2, Standard_D13_V2, Standard_D14_V2, Standard_DS1_V2, Standard_DS2_V2, Standard_DS3_V2, Standard_DS4_V2, Standard_DS5_V2, Standard_DS11_V2, Standard_DS12_V2, Standard_DS13_V2, Standard_DS14_V2, Standard_E2_V3, Standard_E4_V3, Standard_E8_V3, Standard_E16_V3, Standard_E20_V3, Standard_E32_V3, Standard_E64_V3, Standard_E64i_V3, Standard_E2s_V3, Standard_E4s_V3, Standard_E8s_V3, Standard_E16s_V3, Standard_E20s_V3, Standard_E32s_V3, Standard_E64s_V3, Standard_E64is_V3, Standard_D2a_V4, Standard_D4a_V4, Standard_D8a_V4, Standard_D16a_V4, Standard_D32a_V4, Standard_D48a_V4, Standard_D64a_V4, Standard_D96a_V4, Standard_E2a_V4, Standard_E4a_V4, Standard_E8a_V4, Standard_E16a_V4, Standard_E20a_V4, Standard_E32a_V4, Standard_E48a_V4, Standard_E64a_V4, Standard_E96a_V4, Standard_G1, Standard_G2, Standard_G3, Standard_G4, Standard_G5, Standard_F2s_V2, Standard_F4s_V2, Standard_F8s_V2, Standard_F16s_V2, Standard_F32s_V2, Standard_F64s_V2, Standard_F72s_V2, Standard_GS1, Standard_GS2, Standard_GS3, Standard_GS4, Standard_GS5 and Standard_NC24. Changing this forces a new resource to be created.
    password String

    The Password associated with the local administrator for the Head Nodes. Changing this forces a new resource to be created.

    NOTE: If specified, this password must be at least 10 characters in length and must contain at least one digit, one uppercase and one lower case letter, one non-alphanumeric character (except characters ' " ` ).

    scriptActions List<Property Map>
    The script action which will run on the cluster. One or more script_actions blocks as defined below.
    sshKeys List<String>

    A list of SSH Keys which should be used for the local administrator on the Head Nodes. Changing this forces a new resource to be created.

    NOTE: Either a password or one or more ssh_keys must be specified - but not both.

    subnetId String
    The ID of the Subnet within the Virtual Network where the Head Nodes should be provisioned within. Changing this forces a new resource to be created.
    virtualNetworkId String
    The ID of the Virtual Network where the Head Nodes should be provisioned within. Changing this forces a new resource to be created.

    SparkClusterRolesHeadNodeScriptAction, SparkClusterRolesHeadNodeScriptActionArgs

    Name string
    The name of the script action.
    Uri string
    The URI to the script.
    Parameters string
    The parameters for the script provided.
    Name string
    The name of the script action.
    Uri string
    The URI to the script.
    Parameters string
    The parameters for the script provided.
    name String
    The name of the script action.
    uri String
    The URI to the script.
    parameters String
    The parameters for the script provided.
    name string
    The name of the script action.
    uri string
    The URI to the script.
    parameters string
    The parameters for the script provided.
    name str
    The name of the script action.
    uri str
    The URI to the script.
    parameters str
    The parameters for the script provided.
    name String
    The name of the script action.
    uri String
    The URI to the script.
    parameters String
    The parameters for the script provided.
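
    As an alternative to the password-based roles in the Example Usage, a hedged sketch of a head_node that authenticates with an SSH key and runs a script action after provisioning; the key material, script name and URI are placeholders:

    roles: {
        headNode: {
            vmSize: "Standard_A3",
            username: "acctestusrvm",
            sshKeys: ["ssh-rsa AAAAB3NzaC1yc2E... user@example"], // placeholder public key; specify either sshKeys or password, not both
            scriptActions: [{
                name: "install-deps",                               // placeholder action name
                uri: "https://example.com/scripts/install-deps.sh", // placeholder script URI
            }],
        },
        // workerNode and zookeeperNode as in the Example Usage
    },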

    SparkClusterRolesWorkerNode, SparkClusterRolesWorkerNodeArgs

    TargetInstanceCount int
    The number of instances which should be run for the Worker Nodes.
    Username string
    The Username of the local administrator for the Worker Nodes. Changing this forces a new resource to be created.
    VmSize string
    The Size of the Virtual Machine which should be used as the Worker Nodes. Possible values are ExtraSmall, Small, Medium, Large, ExtraLarge, A5, A6, A7, A8, A9, A10, A11, Standard_A1_V2, Standard_A2_V2, Standard_A2m_V2, Standard_A3, Standard_A4_V2, Standard_A4m_V2, Standard_A8_V2, Standard_A8m_V2, Standard_D1, Standard_D2, Standard_D3, Standard_D4, Standard_D11, Standard_D12, Standard_D13, Standard_D14, Standard_D1_V2, Standard_D2_V2, Standard_D3_V2, Standard_D4_V2, Standard_D5_V2, Standard_D11_V2, Standard_D12_V2, Standard_D13_V2, Standard_D14_V2, Standard_DS1_V2, Standard_DS2_V2, Standard_DS3_V2, Standard_DS4_V2, Standard_DS5_V2, Standard_DS11_V2, Standard_DS12_V2, Standard_DS13_V2, Standard_DS14_V2, Standard_E2_V3, Standard_E4_V3, Standard_E8_V3, Standard_E16_V3, Standard_E20_V3, Standard_E32_V3, Standard_E64_V3, Standard_E64i_V3, Standard_E2s_V3, Standard_E4s_V3, Standard_E8s_V3, Standard_E16s_V3, Standard_E20s_V3, Standard_E32s_V3, Standard_E64s_V3, Standard_E64is_V3, Standard_D2a_V4, Standard_D4a_V4, Standard_D8a_V4, Standard_D16a_V4, Standard_D32a_V4, Standard_D48a_V4, Standard_D64a_V4, Standard_D96a_V4, Standard_E2a_V4, Standard_E4a_V4, Standard_E8a_V4, Standard_E16a_V4, Standard_E20a_V4, Standard_E32a_V4, Standard_E48a_V4, Standard_E64a_V4, Standard_E96a_V4, Standard_G1, Standard_G2, Standard_G3, Standard_G4, Standard_G5, Standard_F2s_V2, Standard_F4s_V2, Standard_F8s_V2, Standard_F16s_V2, Standard_F32s_V2, Standard_F64s_V2, Standard_F72s_V2, Standard_GS1, Standard_GS2, Standard_GS3, Standard_GS4, Standard_GS5 and Standard_NC24. Changing this forces a new resource to be created.
    Autoscale SparkClusterRolesWorkerNodeAutoscale
    An autoscale block as defined below.
    Password string

    The Password associated with the local administrator for the Worker Nodes. Changing this forces a new resource to be created.

    NOTE: If specified, this password must be at least 10 characters in length and must contain at least one digit, one uppercase and one lower case letter, one non-alphanumeric character (except characters ' " ` ).

    ScriptActions List<SparkClusterRolesWorkerNodeScriptAction>
    The script action which will run on the cluster. One or more script_actions blocks as defined above.
    SshKeys List<string>

    A list of SSH Keys which should be used for the local administrator on the Worker Nodes. Changing this forces a new resource to be created.

    NOTE: Either a password or one or more ssh_keys must be specified - but not both.

    SubnetId string
    The ID of the Subnet within the Virtual Network where the Worker Nodes should be provisioned within. Changing this forces a new resource to be created.
    VirtualNetworkId string
    The ID of the Virtual Network where the Worker Nodes should be provisioned within. Changing this forces a new resource to be created.
    TargetInstanceCount int
    The number of instances which should be run for the Worker Nodes.
    Username string
    The Username of the local administrator for the Worker Nodes. Changing this forces a new resource to be created.
    VmSize string
    The Size of the Virtual Machine which should be used as the Worker Nodes. Possible values are ExtraSmall, Small, Medium, Large, ExtraLarge, A5, A6, A7, A8, A9, A10, A11, Standard_A1_V2, Standard_A2_V2, Standard_A2m_V2, Standard_A3, Standard_A4_V2, Standard_A4m_V2, Standard_A8_V2, Standard_A8m_V2, Standard_D1, Standard_D2, Standard_D3, Standard_D4, Standard_D11, Standard_D12, Standard_D13, Standard_D14, Standard_D1_V2, Standard_D2_V2, Standard_D3_V2, Standard_D4_V2, Standard_D5_V2, Standard_D11_V2, Standard_D12_V2, Standard_D13_V2, Standard_D14_V2, Standard_DS1_V2, Standard_DS2_V2, Standard_DS3_V2, Standard_DS4_V2, Standard_DS5_V2, Standard_DS11_V2, Standard_DS12_V2, Standard_DS13_V2, Standard_DS14_V2, Standard_E2_V3, Standard_E4_V3, Standard_E8_V3, Standard_E16_V3, Standard_E20_V3, Standard_E32_V3, Standard_E64_V3, Standard_E64i_V3, Standard_E2s_V3, Standard_E4s_V3, Standard_E8s_V3, Standard_E16s_V3, Standard_E20s_V3, Standard_E32s_V3, Standard_E64s_V3, Standard_E64is_V3, Standard_D2a_V4, Standard_D4a_V4, Standard_D8a_V4, Standard_D16a_V4, Standard_D32a_V4, Standard_D48a_V4, Standard_D64a_V4, Standard_D96a_V4, Standard_E2a_V4, Standard_E4a_V4, Standard_E8a_V4, Standard_E16a_V4, Standard_E20a_V4, Standard_E32a_V4, Standard_E48a_V4, Standard_E64a_V4, Standard_E96a_V4, Standard_G1, Standard_G2, Standard_G3, Standard_G4, Standard_G5, Standard_F2s_V2, Standard_F4s_V2, Standard_F8s_V2, Standard_F16s_V2, Standard_F32s_V2, Standard_F64s_V2, Standard_F72s_V2, Standard_GS1, Standard_GS2, Standard_GS3, Standard_GS4, Standard_GS5 and Standard_NC24. Changing this forces a new resource to be created.
    Autoscale SparkClusterRolesWorkerNodeAutoscale
    An autoscale block as defined below.
    Password string

    The Password associated with the local administrator for the Worker Nodes. Changing this forces a new resource to be created.

    NOTE: If specified, this password must be at least 10 characters in length and must contain at least one digit, one uppercase and one lower case letter, one non-alphanumeric character (except characters ' " ` ).

    ScriptActions []SparkClusterRolesWorkerNodeScriptAction
    The script action which will run on the cluster. One or more script_actions blocks as defined above.
    SshKeys []string

    A list of SSH Keys which should be used for the local administrator on the Worker Nodes. Changing this forces a new resource to be created.

    NOTE: Either a password or one or more ssh_keys must be specified - but not both.

    SubnetId string
    The ID of the Subnet within the Virtual Network where the Worker Nodes should be provisioned within. Changing this forces a new resource to be created.
    VirtualNetworkId string
    The ID of the Virtual Network where the Worker Nodes should be provisioned within. Changing this forces a new resource to be created.
    targetInstanceCount Integer
    The number of instances which should be run for the Worker Nodes.
    username String
    The Username of the local administrator for the Worker Nodes. Changing this forces a new resource to be created.
    vmSize String
    The Size of the Virtual Machine which should be used as the Worker Nodes. Possible values are ExtraSmall, Small, Medium, Large, ExtraLarge, A5, A6, A7, A8, A9, A10, A11, Standard_A1_V2, Standard_A2_V2, Standard_A2m_V2, Standard_A3, Standard_A4_V2, Standard_A4m_V2, Standard_A8_V2, Standard_A8m_V2, Standard_D1, Standard_D2, Standard_D3, Standard_D4, Standard_D11, Standard_D12, Standard_D13, Standard_D14, Standard_D1_V2, Standard_D2_V2, Standard_D3_V2, Standard_D4_V2, Standard_D5_V2, Standard_D11_V2, Standard_D12_V2, Standard_D13_V2, Standard_D14_V2, Standard_DS1_V2, Standard_DS2_V2, Standard_DS3_V2, Standard_DS4_V2, Standard_DS5_V2, Standard_DS11_V2, Standard_DS12_V2, Standard_DS13_V2, Standard_DS14_V2, Standard_E2_V3, Standard_E4_V3, Standard_E8_V3, Standard_E16_V3, Standard_E20_V3, Standard_E32_V3, Standard_E64_V3, Standard_E64i_V3, Standard_E2s_V3, Standard_E4s_V3, Standard_E8s_V3, Standard_E16s_V3, Standard_E20s_V3, Standard_E32s_V3, Standard_E64s_V3, Standard_E64is_V3, Standard_D2a_V4, Standard_D4a_V4, Standard_D8a_V4, Standard_D16a_V4, Standard_D32a_V4, Standard_D48a_V4, Standard_D64a_V4, Standard_D96a_V4, Standard_E2a_V4, Standard_E4a_V4, Standard_E8a_V4, Standard_E16a_V4, Standard_E20a_V4, Standard_E32a_V4, Standard_E48a_V4, Standard_E64a_V4, Standard_E96a_V4, Standard_G1, Standard_G2, Standard_G3, Standard_G4, Standard_G5, Standard_F2s_V2, Standard_F4s_V2, Standard_F8s_V2, Standard_F16s_V2, Standard_F32s_V2, Standard_F64s_V2, Standard_F72s_V2, Standard_GS1, Standard_GS2, Standard_GS3, Standard_GS4, Standard_GS5 and Standard_NC24. Changing this forces a new resource to be created.
    autoscale SparkClusterRolesWorkerNodeAutoscale
    An autoscale block as defined below.
    password String

    The Password associated with the local administrator for the Worker Nodes. Changing this forces a new resource to be created.

    NOTE: If specified, this password must be at least 10 characters in length and must contain at least one digit, one uppercase and one lower case letter, one non-alphanumeric character (except characters ' " ` ).

    scriptActions List<SparkClusterRolesWorkerNodeScriptAction>
    The script action which will run on the cluster. One or more script_actions blocks as defined above.
    sshKeys List<String>

    A list of SSH Keys which should be used for the local administrator on the Worker Nodes. Changing this forces a new resource to be created.

    NOTE: Either a password or one or more ssh_keys must be specified - but not both.

    subnetId String
    The ID of the Subnet within the Virtual Network where the Worker Nodes should be provisioned within. Changing this forces a new resource to be created.
    virtualNetworkId String
    The ID of the Virtual Network where the Worker Nodes should be provisioned within. Changing this forces a new resource to be created.
    targetInstanceCount number
    The number of instances which should be run for the Worker Nodes.
    username string
    The Username of the local administrator for the Worker Nodes. Changing this forces a new resource to be created.
    vmSize string
    The Size of the Virtual Machine which should be used as the Worker Nodes. Possible values are ExtraSmall, Small, Medium, Large, ExtraLarge, A5, A6, A7, A8, A9, A10, A11, Standard_A1_V2, Standard_A2_V2, Standard_A2m_V2, Standard_A3, Standard_A4_V2, Standard_A4m_V2, Standard_A8_V2, Standard_A8m_V2, Standard_D1, Standard_D2, Standard_D3, Standard_D4, Standard_D11, Standard_D12, Standard_D13, Standard_D14, Standard_D1_V2, Standard_D2_V2, Standard_D3_V2, Standard_D4_V2, Standard_D5_V2, Standard_D11_V2, Standard_D12_V2, Standard_D13_V2, Standard_D14_V2, Standard_DS1_V2, Standard_DS2_V2, Standard_DS3_V2, Standard_DS4_V2, Standard_DS5_V2, Standard_DS11_V2, Standard_DS12_V2, Standard_DS13_V2, Standard_DS14_V2, Standard_E2_V3, Standard_E4_V3, Standard_E8_V3, Standard_E16_V3, Standard_E20_V3, Standard_E32_V3, Standard_E64_V3, Standard_E64i_V3, Standard_E2s_V3, Standard_E4s_V3, Standard_E8s_V3, Standard_E16s_V3, Standard_E20s_V3, Standard_E32s_V3, Standard_E64s_V3, Standard_E64is_V3, Standard_D2a_V4, Standard_D4a_V4, Standard_D8a_V4, Standard_D16a_V4, Standard_D32a_V4, Standard_D48a_V4, Standard_D64a_V4, Standard_D96a_V4, Standard_E2a_V4, Standard_E4a_V4, Standard_E8a_V4, Standard_E16a_V4, Standard_E20a_V4, Standard_E32a_V4, Standard_E48a_V4, Standard_E64a_V4, Standard_E96a_V4, Standard_G1, Standard_G2, Standard_G3, Standard_G4, Standard_G5, Standard_F2s_V2, Standard_F4s_V2, Standard_F8s_V2, Standard_F16s_V2, Standard_F32s_V2, Standard_F64s_V2, Standard_F72s_V2, Standard_GS1, Standard_GS2, Standard_GS3, Standard_GS4, Standard_GS5 and Standard_NC24. Changing this forces a new resource to be created.
    autoscale SparkClusterRolesWorkerNodeAutoscale
    An autoscale block as defined below.
    password string

    The Password associated with the local administrator for the Worker Nodes. Changing this forces a new resource to be created.

    NOTE: If specified, this password must be at least 10 characters in length and must contain at least one digit, one uppercase and one lower case letter, one non-alphanumeric character (except characters ' " ` ).

    scriptActions SparkClusterRolesWorkerNodeScriptAction[]
    The script action which will run on the cluster. One or more script_actions blocks as defined above.
    sshKeys string[]

    A list of SSH Keys which should be used for the local administrator on the Worker Nodes. Changing this forces a new resource to be created.

    NOTE: Either a password or one or more ssh_keys must be specified - but not both.

    subnetId string
    The ID of the Subnet within the Virtual Network where the Worker Nodes should be provisioned within. Changing this forces a new resource to be created.
    virtualNetworkId string
    The ID of the Virtual Network where the Worker Nodes should be provisioned within. Changing this forces a new resource to be created.
    target_instance_count int
    The number of instances which should be run for the Worker Nodes.
    username str
    The Username of the local administrator for the Worker Nodes. Changing this forces a new resource to be created.
    vm_size str
    The Size of the Virtual Machine which should be used as the Worker Nodes. Possible values are ExtraSmall, Small, Medium, Large, ExtraLarge, A5, A6, A7, A8, A9, A10, A11, Standard_A1_V2, Standard_A2_V2, Standard_A2m_V2, Standard_A3, Standard_A4_V2, Standard_A4m_V2, Standard_A8_V2, Standard_A8m_V2, Standard_D1, Standard_D2, Standard_D3, Standard_D4, Standard_D11, Standard_D12, Standard_D13, Standard_D14, Standard_D1_V2, Standard_D2_V2, Standard_D3_V2, Standard_D4_V2, Standard_D5_V2, Standard_D11_V2, Standard_D12_V2, Standard_D13_V2, Standard_D14_V2, Standard_DS1_V2, Standard_DS2_V2, Standard_DS3_V2, Standard_DS4_V2, Standard_DS5_V2, Standard_DS11_V2, Standard_DS12_V2, Standard_DS13_V2, Standard_DS14_V2, Standard_E2_V3, Standard_E4_V3, Standard_E8_V3, Standard_E16_V3, Standard_E20_V3, Standard_E32_V3, Standard_E64_V3, Standard_E64i_V3, Standard_E2s_V3, Standard_E4s_V3, Standard_E8s_V3, Standard_E16s_V3, Standard_E20s_V3, Standard_E32s_V3, Standard_E64s_V3, Standard_E64is_V3, Standard_D2a_V4, Standard_D4a_V4, Standard_D8a_V4, Standard_D16a_V4, Standard_D32a_V4, Standard_D48a_V4, Standard_D64a_V4, Standard_D96a_V4, Standard_E2a_V4, Standard_E4a_V4, Standard_E8a_V4, Standard_E16a_V4, Standard_E20a_V4, Standard_E32a_V4, Standard_E48a_V4, Standard_E64a_V4, Standard_E96a_V4, Standard_G1, Standard_G2, Standard_G3, Standard_G4, Standard_G5, Standard_F2s_V2, Standard_F4s_V2, Standard_F8s_V2, Standard_F16s_V2, Standard_F32s_V2, Standard_F64s_V2, Standard_F72s_V2, Standard_GS1, Standard_GS2, Standard_GS3, Standard_GS4, Standard_GS5 and Standard_NC24. Changing this forces a new resource to be created.
    autoscale SparkClusterRolesWorkerNodeAutoscale
    An autoscale block as defined below.
    password str

    The Password associated with the local administrator for the Worker Nodes. Changing this forces a new resource to be created.

    NOTE: If specified, this password must be at least 10 characters in length and must contain at least one digit, one uppercase letter, one lowercase letter, and one non-alphanumeric character (except the characters ' " ` ).

    script_actions Sequence[SparkClusterRolesWorkerNodeScriptAction]
    The script action which will run on the cluster. One or more script_actions blocks as defined above.
    ssh_keys Sequence[str]

    A list of SSH Keys which should be used for the local administrator on the Worker Nodes. Changing this forces a new resource to be created.

    NOTE: Either a password or one or more ssh_keys must be specified - but not both.

    subnet_id str
    The ID of the Subnet within the Virtual Network where the Worker Nodes should be provisioned. Changing this forces a new resource to be created.
    virtual_network_id str
    The ID of the Virtual Network where the Worker Nodes should be provisioned. Changing this forces a new resource to be created.
    targetInstanceCount Number
    The number of instances which should be run for the Worker Nodes.
    username String
    The Username of the local administrator for the Worker Nodes. Changing this forces a new resource to be created.
    vmSize String
    The Size of the Virtual Machine which should be used as the Worker Nodes. Possible values are ExtraSmall, Small, Medium, Large, ExtraLarge, A5, A6, A7, A8, A9, A10, A11, Standard_A1_V2, Standard_A2_V2, Standard_A2m_V2, Standard_A3, Standard_A4_V2, Standard_A4m_V2, Standard_A8_V2, Standard_A8m_V2, Standard_D1, Standard_D2, Standard_D3, Standard_D4, Standard_D11, Standard_D12, Standard_D13, Standard_D14, Standard_D1_V2, Standard_D2_V2, Standard_D3_V2, Standard_D4_V2, Standard_D5_V2, Standard_D11_V2, Standard_D12_V2, Standard_D13_V2, Standard_D14_V2, Standard_DS1_V2, Standard_DS2_V2, Standard_DS3_V2, Standard_DS4_V2, Standard_DS5_V2, Standard_DS11_V2, Standard_DS12_V2, Standard_DS13_V2, Standard_DS14_V2, Standard_E2_V3, Standard_E4_V3, Standard_E8_V3, Standard_E16_V3, Standard_E20_V3, Standard_E32_V3, Standard_E64_V3, Standard_E64i_V3, Standard_E2s_V3, Standard_E4s_V3, Standard_E8s_V3, Standard_E16s_V3, Standard_E20s_V3, Standard_E32s_V3, Standard_E64s_V3, Standard_E64is_V3, Standard_D2a_V4, Standard_D4a_V4, Standard_D8a_V4, Standard_D16a_V4, Standard_D32a_V4, Standard_D48a_V4, Standard_D64a_V4, Standard_D96a_V4, Standard_E2a_V4, Standard_E4a_V4, Standard_E8a_V4, Standard_E16a_V4, Standard_E20a_V4, Standard_E32a_V4, Standard_E48a_V4, Standard_E64a_V4, Standard_E96a_V4, Standard_G1, Standard_G2, Standard_G3, Standard_G4, Standard_G5, Standard_F2s_V2, Standard_F4s_V2, Standard_F8s_V2, Standard_F16s_V2, Standard_F32s_V2, Standard_F64s_V2, Standard_F72s_V2, Standard_GS1, Standard_GS2, Standard_GS3, Standard_GS4, Standard_GS5 and Standard_NC24. Changing this forces a new resource to be created.
    autoscale Property Map
    A autoscale block as defined below.
    password String

    The Password associated with the local administrator for the Worker Nodes. Changing this forces a new resource to be created.

    NOTE: If specified, this password must be at least 10 characters in length and must contain at least one digit, one uppercase letter, one lowercase letter, and one non-alphanumeric character (except the characters ' " ` ).

    scriptActions List<Property Map>
    The script action which will run on the cluster. One or more script_actions blocks as defined above.
    sshKeys List<String>

    A list of SSH Keys which should be used for the local administrator on the Worker Nodes. Changing this forces a new resource to be created.

    NOTE: Either a password or one or more ssh_keys must be specified - but not both.

    subnetId String
    The ID of the Subnet within the Virtual Network where the Worker Nodes should be provisioned. Changing this forces a new resource to be created.
    virtualNetworkId String
    The ID of the Virtual Network where the Worker Nodes should be provisioned. Changing this forces a new resource to be created.
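
    As a minimal sketch of the arguments above (a hypothetical illustration: the username, SSH key, and network resource IDs are placeholders for resources defined elsewhere in your program), a worker node block that places the nodes inside a subnet and authenticates with an SSH key instead of a password could look like this in TypeScript:

    const workerNode = {
        vmSize: "Standard_A3",
        username: "clusteradmin",
        // Either sshKeys or password may be set, but not both.
        sshKeys: ["ssh-rsa AAAAB3Nza... replace-with-your-public-key"],
        targetInstanceCount: 3,
        subnetId: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/example-resources/providers/Microsoft.Network/virtualNetworks/example-vnet/subnets/default",
        virtualNetworkId: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/example-resources/providers/Microsoft.Network/virtualNetworks/example-vnet",
    };

    This object would be passed as roles.workerNode on the azure.hdinsight.SparkCluster resource.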

    SparkClusterRolesWorkerNodeAutoscale, SparkClusterRolesWorkerNodeAutoscaleArgs

    Capacity SparkClusterRolesWorkerNodeAutoscaleCapacity
    A capacity block as defined below.
    Recurrence SparkClusterRolesWorkerNodeAutoscaleRecurrence

    A recurrence block as defined below.

    NOTE: Either a capacity or recurrence block must be specified - but not both.

    Capacity SparkClusterRolesWorkerNodeAutoscaleCapacity
    A capacity block as defined below.
    Recurrence SparkClusterRolesWorkerNodeAutoscaleRecurrence

    A recurrence block as defined below.

    NOTE: Either a capacity or recurrence block must be specified - but not both.

    capacity SparkClusterRolesWorkerNodeAutoscaleCapacity
    A capacity block as defined below.
    recurrence SparkClusterRolesWorkerNodeAutoscaleRecurrence

    A recurrence block as defined below.

    NOTE: Either a capacity or recurrence block must be specified - but not both.

    capacity SparkClusterRolesWorkerNodeAutoscaleCapacity
    A capacity block as defined below.
    recurrence SparkClusterRolesWorkerNodeAutoscaleRecurrence

    A recurrence block as defined below.

    NOTE: Either a capacity or recurrence block must be specified - but not both.

    capacity SparkClusterRolesWorkerNodeAutoscaleCapacity
    A capacity block as defined below.
    recurrence SparkClusterRolesWorkerNodeAutoscaleRecurrence

    A recurrence block as defined below.

    NOTE: Either a capacity or recurrence block must be specified - but not both.

    capacity Property Map
    A capacity block as defined below.
    recurrence Property Map

    A recurrence block as defined below.

    NOTE: Either a capacity or recurrence block must be specified - but not both.

    SparkClusterRolesWorkerNodeAutoscaleCapacity, SparkClusterRolesWorkerNodeAutoscaleCapacityArgs

    MaxInstanceCount int
    The maximum number of worker nodes to autoscale to based on the cluster's activity.
    MinInstanceCount int
    The minimum number of worker nodes to autoscale to based on the cluster's activity.
    MaxInstanceCount int
    The maximum number of worker nodes to autoscale to based on the cluster's activity.
    MinInstanceCount int
    The minimum number of worker nodes to autoscale to based on the cluster's activity.
    maxInstanceCount Integer
    The maximum number of worker nodes to autoscale to based on the cluster's activity.
    minInstanceCount Integer
    The minimum number of worker nodes to autoscale to based on the cluster's activity.
    maxInstanceCount number
    The maximum number of worker nodes to autoscale to based on the cluster's activity.
    minInstanceCount number
    The minimum number of worker nodes to autoscale to based on the cluster's activity.
    max_instance_count int
    The maximum number of worker nodes to autoscale to based on the cluster's activity.
    min_instance_count int
    The minimum number of worker nodes to autoscale to based on the cluster's activity.
    maxInstanceCount Number
    The maximum number of worker nodes to autoscale to based on the cluster's activity.
    minInstanceCount Number
    The minimum number of worker nodes to autoscale to based on the cluster's activity.
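
    For example, a capacity-based autoscale block (a sketch; the instance counts are illustrative) scales the worker nodes between two bounds based on cluster activity, and cannot be combined with a recurrence block:

    const capacityAutoscale = {
        capacity: {
            minInstanceCount: 3,
            maxInstanceCount: 10,
        },
    };

    This object would be passed as roles.workerNode.autoscale on the cluster.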

    SparkClusterRolesWorkerNodeAutoscaleRecurrence, SparkClusterRolesWorkerNodeAutoscaleRecurrenceArgs

    Schedules List<SparkClusterRolesWorkerNodeAutoscaleRecurrenceSchedule>
    A list of schedule blocks as defined below.
    Timezone string
    The time zone for the autoscale schedule times.
    Schedules []SparkClusterRolesWorkerNodeAutoscaleRecurrenceSchedule
    A list of schedule blocks as defined below.
    Timezone string
    The time zone for the autoscale schedule times.
    schedules List<SparkClusterRolesWorkerNodeAutoscaleRecurrenceSchedule>
    A list of schedule blocks as defined below.
    timezone String
    The time zone for the autoscale schedule times.
    schedules SparkClusterRolesWorkerNodeAutoscaleRecurrenceSchedule[]
    A list of schedule blocks as defined below.
    timezone string
    The time zone for the autoscale schedule times.
    schedules Sequence[SparkClusterRolesWorkerNodeAutoscaleRecurrenceSchedule]
    A list of schedule blocks as defined below.
    timezone str
    The time zone for the autoscale schedule times.
    schedules List<Property Map>
    A list of schedule blocks as defined below.
    timezone String
    The time zone for the autoscale schedule times.

    SparkClusterRolesWorkerNodeAutoscaleRecurrenceSchedule, SparkClusterRolesWorkerNodeAutoscaleRecurrenceScheduleArgs

    Days List<string>
    The days of the week to perform autoscale. Possible values are Monday, Tuesday, Wednesday, Thursday, Friday, Saturday and Sunday.
    TargetInstanceCount int
    The number of worker nodes to autoscale at the specified time.
    Time string
    The time of day to perform the autoscale, in 24-hour format.
    Days []string
    The days of the week to perform autoscale. Possible values are Monday, Tuesday, Wednesday, Thursday, Friday, Saturday and Sunday.
    TargetInstanceCount int
    The number of worker nodes to autoscale at the specified time.
    Time string
    The time of day to perform the autoscale, in 24-hour format.
    days List<String>
    The days of the week to perform autoscale. Possible values are Monday, Tuesday, Wednesday, Thursday, Friday, Saturday and Sunday.
    targetInstanceCount Integer
    The number of worker nodes to autoscale at the specified time.
    time String
    The time of day to perform the autoscale, in 24-hour format.
    days string[]
    The days of the week to perform autoscale. Possible values are Monday, Tuesday, Wednesday, Thursday, Friday, Saturday and Sunday.
    targetInstanceCount number
    The number of worker nodes to autoscale at the specified time.
    time string
    The time of day to perform the autoscale, in 24-hour format.
    days Sequence[str]
    The days of the week to perform autoscale. Possible values are Monday, Tuesday, Wednesday, Thursday, Friday, Saturday and Sunday.
    target_instance_count int
    The number of worker nodes to autoscale at the specified time.
    time str
    The time of day to perform the autoscale, in 24-hour format.
    days List<String>
    The days of the week to perform autoscale. Possible values are Monday, Tuesday, Wednesday, Thursday, Friday, Saturday and Sunday.
    targetInstanceCount Number
    The number of worker nodes to autoscale at the specified time.
    time String
    The time of day to perform the autoscale, in 24-hour format.
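
    As a sketch, a schedule-based autoscale block (which cannot be combined with a capacity block) might scale the worker nodes up on weekday mornings and back down in the evening; the time zone name, times, and instance counts below are illustrative:

    const scheduledAutoscale = {
        recurrence: {
            timezone: "Pacific Standard Time",
            schedules: [
                {
                    days: ["Monday", "Tuesday", "Wednesday", "Thursday", "Friday"],
                    time: "08:00", // 24-hour format
                    targetInstanceCount: 8,
                },
                {
                    days: ["Monday", "Tuesday", "Wednesday", "Thursday", "Friday"],
                    time: "19:00",
                    targetInstanceCount: 3,
                },
            ],
        },
    };

    Like the capacity sketch above, this object would be passed as roles.workerNode.autoscale.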

    SparkClusterRolesWorkerNodeScriptAction, SparkClusterRolesWorkerNodeScriptActionArgs

    Name string
    The name of the script action.
    Uri string
    The URI to the script.
    Parameters string
    The parameters for the script provided.
    Name string
    The name of the script action.
    Uri string
    The URI to the script.
    Parameters string
    The parameters for the script provided.
    name String
    The name of the script action.
    uri String
    The URI to the script.
    parameters String
    The parameters for the script provided.
    name string
    The name of the script action.
    uri string
    The URI to the script.
    parameters string
    The parameters for the script provided.
    name str
    The name of the script action.
    uri str
    The URI to the script.
    parameters str
    The parameters for the script provided.
    name String
    The name of the script action.
    uri String
    The URI to the script.
    parameters String
    The parameters for the script provided.
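
    For example, a single script action entry (a sketch; the script URI and parameters are placeholders for a script reachable from the cluster nodes, such as one hosted in a blob container):

    const installDeps = {
        name: "install-deps",
        uri: "https://example.blob.core.windows.net/scripts/install-deps.sh",
        parameters: "--verbose",
    };

    One or more such objects are passed as the scriptActions list on a head node, worker node, or Zookeeper node block.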

    SparkClusterRolesZookeeperNode, SparkClusterRolesZookeeperNodeArgs

    Username string
    The Username of the local administrator for the Zookeeper Nodes. Changing this forces a new resource to be created.
    VmSize string
    The Size of the Virtual Machine which should be used as the Zookeeper Nodes. Possible values are ExtraSmall, Small, Medium, Large, ExtraLarge, A5, A6, A7, A8, A9, A10, A11, Standard_A1_V2, Standard_A2_V2, Standard_A2m_V2, Standard_A3, Standard_A4_V2, Standard_A4m_V2, Standard_A8_V2, Standard_A8m_V2, Standard_D1, Standard_D2, Standard_D3, Standard_D4, Standard_D11, Standard_D12, Standard_D13, Standard_D14, Standard_D1_V2, Standard_D2_V2, Standard_D3_V2, Standard_D4_V2, Standard_D5_V2, Standard_D11_V2, Standard_D12_V2, Standard_D13_V2, Standard_D14_V2, Standard_DS1_V2, Standard_DS2_V2, Standard_DS3_V2, Standard_DS4_V2, Standard_DS5_V2, Standard_DS11_V2, Standard_DS12_V2, Standard_DS13_V2, Standard_DS14_V2, Standard_E2_V3, Standard_E4_V3, Standard_E8_V3, Standard_E16_V3, Standard_E20_V3, Standard_E32_V3, Standard_E64_V3, Standard_E64i_V3, Standard_E2s_V3, Standard_E4s_V3, Standard_E8s_V3, Standard_E16s_V3, Standard_E20s_V3, Standard_E32s_V3, Standard_E64s_V3, Standard_E64is_V3, Standard_D2a_V4, Standard_D4a_V4, Standard_D8a_V4, Standard_D16a_V4, Standard_D32a_V4, Standard_D48a_V4, Standard_D64a_V4, Standard_D96a_V4, Standard_E2a_V4, Standard_E4a_V4, Standard_E8a_V4, Standard_E16a_V4, Standard_E20a_V4, Standard_E32a_V4, Standard_E48a_V4, Standard_E64a_V4, Standard_E96a_V4, Standard_G1, Standard_G2, Standard_G3, Standard_G4, Standard_G5, Standard_F2s_V2, Standard_F4s_V2, Standard_F8s_V2, Standard_F16s_V2, Standard_F32s_V2, Standard_F64s_V2, Standard_F72s_V2, Standard_GS1, Standard_GS2, Standard_GS3, Standard_GS4, Standard_GS5 and Standard_NC24. Changing this forces a new resource to be created.
    Password string

    The Password associated with the local administrator for the Zookeeper Nodes. Changing this forces a new resource to be created.

    NOTE: If specified, this password must be at least 10 characters in length and must contain at least one digit, one uppercase letter, one lowercase letter, and one non-alphanumeric character (except the characters ' " ` ).

    ScriptActions List<SparkClusterRolesZookeeperNodeScriptAction>
    The script action which will run on the cluster. One or more script_actions blocks as defined above.
    SshKeys List<string>

    A list of SSH Keys which should be used for the local administrator on the Zookeeper Nodes. Changing this forces a new resource to be created.

    NOTE: Either a password or one or more ssh_keys must be specified - but not both.

    SubnetId string
    The ID of the Subnet within the Virtual Network where the Zookeeper Nodes should be provisioned. Changing this forces a new resource to be created.
    VirtualNetworkId string
    The ID of the Virtual Network where the Zookeeper Nodes should be provisioned. Changing this forces a new resource to be created.
    Username string
    The Username of the local administrator for the Zookeeper Nodes. Changing this forces a new resource to be created.
    VmSize string
    The Size of the Virtual Machine which should be used as the Zookeeper Nodes. Possible values are ExtraSmall, Small, Medium, Large, ExtraLarge, A5, A6, A7, A8, A9, A10, A11, Standard_A1_V2, Standard_A2_V2, Standard_A2m_V2, Standard_A3, Standard_A4_V2, Standard_A4m_V2, Standard_A8_V2, Standard_A8m_V2, Standard_D1, Standard_D2, Standard_D3, Standard_D4, Standard_D11, Standard_D12, Standard_D13, Standard_D14, Standard_D1_V2, Standard_D2_V2, Standard_D3_V2, Standard_D4_V2, Standard_D5_V2, Standard_D11_V2, Standard_D12_V2, Standard_D13_V2, Standard_D14_V2, Standard_DS1_V2, Standard_DS2_V2, Standard_DS3_V2, Standard_DS4_V2, Standard_DS5_V2, Standard_DS11_V2, Standard_DS12_V2, Standard_DS13_V2, Standard_DS14_V2, Standard_E2_V3, Standard_E4_V3, Standard_E8_V3, Standard_E16_V3, Standard_E20_V3, Standard_E32_V3, Standard_E64_V3, Standard_E64i_V3, Standard_E2s_V3, Standard_E4s_V3, Standard_E8s_V3, Standard_E16s_V3, Standard_E20s_V3, Standard_E32s_V3, Standard_E64s_V3, Standard_E64is_V3, Standard_D2a_V4, Standard_D4a_V4, Standard_D8a_V4, Standard_D16a_V4, Standard_D32a_V4, Standard_D48a_V4, Standard_D64a_V4, Standard_D96a_V4, Standard_E2a_V4, Standard_E4a_V4, Standard_E8a_V4, Standard_E16a_V4, Standard_E20a_V4, Standard_E32a_V4, Standard_E48a_V4, Standard_E64a_V4, Standard_E96a_V4, Standard_G1, Standard_G2, Standard_G3, Standard_G4, Standard_G5, Standard_F2s_V2, Standard_F4s_V2, Standard_F8s_V2, Standard_F16s_V2, Standard_F32s_V2, Standard_F64s_V2, Standard_F72s_V2, Standard_GS1, Standard_GS2, Standard_GS3, Standard_GS4, Standard_GS5 and Standard_NC24. Changing this forces a new resource to be created.
    Password string

    The Password associated with the local administrator for the Zookeeper Nodes. Changing this forces a new resource to be created.

    NOTE: If specified, this password must be at least 10 characters in length and must contain at least one digit, one uppercase letter, one lowercase letter, and one non-alphanumeric character (except the characters ' " ` ).

    ScriptActions []SparkClusterRolesZookeeperNodeScriptAction
    The script action which will run on the cluster. One or more script_actions blocks as defined above.
    SshKeys []string

    A list of SSH Keys which should be used for the local administrator on the Zookeeper Nodes. Changing this forces a new resource to be created.

    NOTE: Either a password or one or more ssh_keys must be specified - but not both.

    SubnetId string
    The ID of the Subnet within the Virtual Network where the Zookeeper Nodes should be provisioned. Changing this forces a new resource to be created.
    VirtualNetworkId string
    The ID of the Virtual Network where the Zookeeper Nodes should be provisioned. Changing this forces a new resource to be created.
    username String
    The Username of the local administrator for the Zookeeper Nodes. Changing this forces a new resource to be created.
    vmSize String
    The Size of the Virtual Machine which should be used as the Zookeeper Nodes. Possible values are ExtraSmall, Small, Medium, Large, ExtraLarge, A5, A6, A7, A8, A9, A10, A11, Standard_A1_V2, Standard_A2_V2, Standard_A2m_V2, Standard_A3, Standard_A4_V2, Standard_A4m_V2, Standard_A8_V2, Standard_A8m_V2, Standard_D1, Standard_D2, Standard_D3, Standard_D4, Standard_D11, Standard_D12, Standard_D13, Standard_D14, Standard_D1_V2, Standard_D2_V2, Standard_D3_V2, Standard_D4_V2, Standard_D5_V2, Standard_D11_V2, Standard_D12_V2, Standard_D13_V2, Standard_D14_V2, Standard_DS1_V2, Standard_DS2_V2, Standard_DS3_V2, Standard_DS4_V2, Standard_DS5_V2, Standard_DS11_V2, Standard_DS12_V2, Standard_DS13_V2, Standard_DS14_V2, Standard_E2_V3, Standard_E4_V3, Standard_E8_V3, Standard_E16_V3, Standard_E20_V3, Standard_E32_V3, Standard_E64_V3, Standard_E64i_V3, Standard_E2s_V3, Standard_E4s_V3, Standard_E8s_V3, Standard_E16s_V3, Standard_E20s_V3, Standard_E32s_V3, Standard_E64s_V3, Standard_E64is_V3, Standard_D2a_V4, Standard_D4a_V4, Standard_D8a_V4, Standard_D16a_V4, Standard_D32a_V4, Standard_D48a_V4, Standard_D64a_V4, Standard_D96a_V4, Standard_E2a_V4, Standard_E4a_V4, Standard_E8a_V4, Standard_E16a_V4, Standard_E20a_V4, Standard_E32a_V4, Standard_E48a_V4, Standard_E64a_V4, Standard_E96a_V4, Standard_G1, Standard_G2, Standard_G3, Standard_G4, Standard_G5, Standard_F2s_V2, Standard_F4s_V2, Standard_F8s_V2, Standard_F16s_V2, Standard_F32s_V2, Standard_F64s_V2, Standard_F72s_V2, Standard_GS1, Standard_GS2, Standard_GS3, Standard_GS4, Standard_GS5 and Standard_NC24. Changing this forces a new resource to be created.
    password String

    The Password associated with the local administrator for the Zookeeper Nodes. Changing this forces a new resource to be created.

    NOTE: If specified, this password must be at least 10 characters in length and must contain at least one digit, one uppercase letter, one lowercase letter, and one non-alphanumeric character (except the characters ' " ` ).

    scriptActions List<SparkClusterRolesZookeeperNodeScriptAction>
    The script action which will run on the cluster. One or more script_actions blocks as defined above.
    sshKeys List<String>

    A list of SSH Keys which should be used for the local administrator on the Zookeeper Nodes. Changing this forces a new resource to be created.

    NOTE: Either a password or one or more ssh_keys must be specified - but not both.

    subnetId String
    The ID of the Subnet within the Virtual Network where the Zookeeper Nodes should be provisioned. Changing this forces a new resource to be created.
    virtualNetworkId String
    The ID of the Virtual Network where the Zookeeper Nodes should be provisioned. Changing this forces a new resource to be created.
    username string
    The Username of the local administrator for the Zookeeper Nodes. Changing this forces a new resource to be created.
    vmSize string
    The Size of the Virtual Machine which should be used as the Zookeeper Nodes. Possible values are ExtraSmall, Small, Medium, Large, ExtraLarge, A5, A6, A7, A8, A9, A10, A11, Standard_A1_V2, Standard_A2_V2, Standard_A2m_V2, Standard_A3, Standard_A4_V2, Standard_A4m_V2, Standard_A8_V2, Standard_A8m_V2, Standard_D1, Standard_D2, Standard_D3, Standard_D4, Standard_D11, Standard_D12, Standard_D13, Standard_D14, Standard_D1_V2, Standard_D2_V2, Standard_D3_V2, Standard_D4_V2, Standard_D5_V2, Standard_D11_V2, Standard_D12_V2, Standard_D13_V2, Standard_D14_V2, Standard_DS1_V2, Standard_DS2_V2, Standard_DS3_V2, Standard_DS4_V2, Standard_DS5_V2, Standard_DS11_V2, Standard_DS12_V2, Standard_DS13_V2, Standard_DS14_V2, Standard_E2_V3, Standard_E4_V3, Standard_E8_V3, Standard_E16_V3, Standard_E20_V3, Standard_E32_V3, Standard_E64_V3, Standard_E64i_V3, Standard_E2s_V3, Standard_E4s_V3, Standard_E8s_V3, Standard_E16s_V3, Standard_E20s_V3, Standard_E32s_V3, Standard_E64s_V3, Standard_E64is_V3, Standard_D2a_V4, Standard_D4a_V4, Standard_D8a_V4, Standard_D16a_V4, Standard_D32a_V4, Standard_D48a_V4, Standard_D64a_V4, Standard_D96a_V4, Standard_E2a_V4, Standard_E4a_V4, Standard_E8a_V4, Standard_E16a_V4, Standard_E20a_V4, Standard_E32a_V4, Standard_E48a_V4, Standard_E64a_V4, Standard_E96a_V4, Standard_G1, Standard_G2, Standard_G3, Standard_G4, Standard_G5, Standard_F2s_V2, Standard_F4s_V2, Standard_F8s_V2, Standard_F16s_V2, Standard_F32s_V2, Standard_F64s_V2, Standard_F72s_V2, Standard_GS1, Standard_GS2, Standard_GS3, Standard_GS4, Standard_GS5 and Standard_NC24. Changing this forces a new resource to be created.
    password string

    The Password associated with the local administrator for the Zookeeper Nodes. Changing this forces a new resource to be created.

    NOTE: If specified, this password must be at least 10 characters in length and must contain at least one digit, one uppercase letter, one lowercase letter, and one non-alphanumeric character (except the characters ' " ` ).

    scriptActions SparkClusterRolesZookeeperNodeScriptAction[]
    The script action which will run on the cluster. One or more script_actions blocks as defined above.
    sshKeys string[]

    A list of SSH Keys which should be used for the local administrator on the Zookeeper Nodes. Changing this forces a new resource to be created.

    NOTE: Either a password or one or more ssh_keys must be specified - but not both.

    subnetId string
    The ID of the Subnet within the Virtual Network where the Zookeeper Nodes should be provisioned. Changing this forces a new resource to be created.
    virtualNetworkId string
    The ID of the Virtual Network where the Zookeeper Nodes should be provisioned. Changing this forces a new resource to be created.
    username str
    The Username of the local administrator for the Zookeeper Nodes. Changing this forces a new resource to be created.
    vm_size str
    The Size of the Virtual Machine which should be used as the Zookeeper Nodes. Possible values are ExtraSmall, Small, Medium, Large, ExtraLarge, A5, A6, A7, A8, A9, A10, A11, Standard_A1_V2, Standard_A2_V2, Standard_A2m_V2, Standard_A3, Standard_A4_V2, Standard_A4m_V2, Standard_A8_V2, Standard_A8m_V2, Standard_D1, Standard_D2, Standard_D3, Standard_D4, Standard_D11, Standard_D12, Standard_D13, Standard_D14, Standard_D1_V2, Standard_D2_V2, Standard_D3_V2, Standard_D4_V2, Standard_D5_V2, Standard_D11_V2, Standard_D12_V2, Standard_D13_V2, Standard_D14_V2, Standard_DS1_V2, Standard_DS2_V2, Standard_DS3_V2, Standard_DS4_V2, Standard_DS5_V2, Standard_DS11_V2, Standard_DS12_V2, Standard_DS13_V2, Standard_DS14_V2, Standard_E2_V3, Standard_E4_V3, Standard_E8_V3, Standard_E16_V3, Standard_E20_V3, Standard_E32_V3, Standard_E64_V3, Standard_E64i_V3, Standard_E2s_V3, Standard_E4s_V3, Standard_E8s_V3, Standard_E16s_V3, Standard_E20s_V3, Standard_E32s_V3, Standard_E64s_V3, Standard_E64is_V3, Standard_D2a_V4, Standard_D4a_V4, Standard_D8a_V4, Standard_D16a_V4, Standard_D32a_V4, Standard_D48a_V4, Standard_D64a_V4, Standard_D96a_V4, Standard_E2a_V4, Standard_E4a_V4, Standard_E8a_V4, Standard_E16a_V4, Standard_E20a_V4, Standard_E32a_V4, Standard_E48a_V4, Standard_E64a_V4, Standard_E96a_V4, Standard_G1, Standard_G2, Standard_G3, Standard_G4, Standard_G5, Standard_F2s_V2, Standard_F4s_V2, Standard_F8s_V2, Standard_F16s_V2, Standard_F32s_V2, Standard_F64s_V2, Standard_F72s_V2, Standard_GS1, Standard_GS2, Standard_GS3, Standard_GS4, Standard_GS5 and Standard_NC24. Changing this forces a new resource to be created.
    password str

    The Password associated with the local administrator for the Zookeeper Nodes. Changing this forces a new resource to be created.

    NOTE: If specified, this password must be at least 10 characters in length and must contain at least one digit, one uppercase letter, one lowercase letter, and one non-alphanumeric character (except the characters ' " ` ).

    script_actions Sequence[SparkClusterRolesZookeeperNodeScriptAction]
    The script action which will run on the cluster. One or more script_actions blocks as defined above.
    ssh_keys Sequence[str]

    A list of SSH Keys which should be used for the local administrator on the Zookeeper Nodes. Changing this forces a new resource to be created.

    NOTE: Either a password or one or more ssh_keys must be specified - but not both.

    subnet_id str
    The ID of the Subnet within the Virtual Network where the Zookeeper Nodes should be provisioned. Changing this forces a new resource to be created.
    virtual_network_id str
    The ID of the Virtual Network where the Zookeeper Nodes should be provisioned. Changing this forces a new resource to be created.
    username String
    The Username of the local administrator for the Zookeeper Nodes. Changing this forces a new resource to be created.
    vmSize String
    The Size of the Virtual Machine which should be used as the Zookeeper Nodes. Possible values are ExtraSmall, Small, Medium, Large, ExtraLarge, A5, A6, A7, A8, A9, A10, A11, Standard_A1_V2, Standard_A2_V2, Standard_A2m_V2, Standard_A3, Standard_A4_V2, Standard_A4m_V2, Standard_A8_V2, Standard_A8m_V2, Standard_D1, Standard_D2, Standard_D3, Standard_D4, Standard_D11, Standard_D12, Standard_D13, Standard_D14, Standard_D1_V2, Standard_D2_V2, Standard_D3_V2, Standard_D4_V2, Standard_D5_V2, Standard_D11_V2, Standard_D12_V2, Standard_D13_V2, Standard_D14_V2, Standard_DS1_V2, Standard_DS2_V2, Standard_DS3_V2, Standard_DS4_V2, Standard_DS5_V2, Standard_DS11_V2, Standard_DS12_V2, Standard_DS13_V2, Standard_DS14_V2, Standard_E2_V3, Standard_E4_V3, Standard_E8_V3, Standard_E16_V3, Standard_E20_V3, Standard_E32_V3, Standard_E64_V3, Standard_E64i_V3, Standard_E2s_V3, Standard_E4s_V3, Standard_E8s_V3, Standard_E16s_V3, Standard_E20s_V3, Standard_E32s_V3, Standard_E64s_V3, Standard_E64is_V3, Standard_D2a_V4, Standard_D4a_V4, Standard_D8a_V4, Standard_D16a_V4, Standard_D32a_V4, Standard_D48a_V4, Standard_D64a_V4, Standard_D96a_V4, Standard_E2a_V4, Standard_E4a_V4, Standard_E8a_V4, Standard_E16a_V4, Standard_E20a_V4, Standard_E32a_V4, Standard_E48a_V4, Standard_E64a_V4, Standard_E96a_V4, Standard_G1, Standard_G2, Standard_G3, Standard_G4, Standard_G5, Standard_F2s_V2, Standard_F4s_V2, Standard_F8s_V2, Standard_F16s_V2, Standard_F32s_V2, Standard_F64s_V2, Standard_F72s_V2, Standard_GS1, Standard_GS2, Standard_GS3, Standard_GS4, Standard_GS5 and Standard_NC24. Changing this forces a new resource to be created.
    password String

    The Password associated with the local administrator for the Zookeeper Nodes. Changing this forces a new resource to be created.

    NOTE: If specified, this password must be at least 10 characters in length and must contain at least one digit, one uppercase letter, one lowercase letter, and one non-alphanumeric character (except the characters ' " ` ).

    scriptActions List<Property Map>
    The script action which will run on the cluster. One or more script_actions blocks as defined above.
    sshKeys List<String>

    A list of SSH Keys which should be used for the local administrator on the Zookeeper Nodes. Changing this forces a new resource to be created.

    NOTE: Either a password or one or more ssh_keys must be specified - but not both.

    subnetId String
    The ID of the Subnet within the Virtual Network where the Zookeeper Nodes should be provisioned. Changing this forces a new resource to be created.
    virtualNetworkId String
    The ID of the Virtual Network where the Zookeeper Nodes should be provisioned. Changing this forces a new resource to be created.

    SparkClusterRolesZookeeperNodeScriptAction, SparkClusterRolesZookeeperNodeScriptActionArgs

    Name string
    The name of the script action.
    Uri string
    The URI to the script.
    Parameters string
    The parameters for the script provided.
    Name string
    The name of the script action.
    Uri string
    The URI to the script.
    Parameters string
    The parameters for the script provided.
    name String
    The name of the script action.
    uri String
    The URI to the script.
    parameters String
    The parameters for the script provided.
    name string
    The name of the script action.
    uri string
    The URI to the script.
    parameters string
    The parameters for the script provided.
    name str
    The name of the script action.
    uri str
    The URI to the script.
    parameters str
    The parameters for the script provided.
    name String
    The name of the script action.
    uri String
    The URI to the script.
    parameters String
    The parameters for the script provided.

    SparkClusterSecurityProfile, SparkClusterSecurityProfileArgs

    AaddsResourceId string
    The resource ID of the Azure Active Directory Domain Service. Changing this forces a new resource to be created.
    DomainName string
    The name of the Azure Active Directory Domain. Changing this forces a new resource to be created.
    DomainUserPassword string
    The user password of the Azure Active Directory Domain. Changing this forces a new resource to be created.
    DomainUsername string
    The username of the Azure Active Directory Domain. Changing this forces a new resource to be created.
    LdapsUrls List<string>
    A list of the LDAPS URLs to communicate with the Azure Active Directory. Changing this forces a new resource to be created.
    MsiResourceId string
    The User Assigned Identity for the HDInsight Cluster. Changing this forces a new resource to be created.
    ClusterUsersGroupDns List<string>
    A list of the distinguished names for the cluster user groups. Changing this forces a new resource to be created.
    AaddsResourceId string
    The resource ID of the Azure Active Directory Domain Service. Changing this forces a new resource to be created.
    DomainName string
    The name of the Azure Active Directory Domain. Changing this forces a new resource to be created.
    DomainUserPassword string
    The user password of the Azure Active Directory Domain. Changing this forces a new resource to be created.
    DomainUsername string
    The username of the Azure Active Directory Domain. Changing this forces a new resource to be created.
    LdapsUrls []string
    A list of the LDAPS URLs to communicate with the Azure Active Directory. Changing this forces a new resource to be created.
    MsiResourceId string
    The User Assigned Identity for the HDInsight Cluster. Changing this forces a new resource to be created.
    ClusterUsersGroupDns []string
    A list of the distinguished names for the cluster user groups. Changing this forces a new resource to be created.
    aaddsResourceId String
    The resource ID of the Azure Active Directory Domain Service. Changing this forces a new resource to be created.
    domainName String
    The name of the Azure Active Directory Domain. Changing this forces a new resource to be created.
    domainUserPassword String
    The user password of the Azure Active Directory Domain. Changing this forces a new resource to be created.
    domainUsername String
    The username of the Azure Active Directory Domain. Changing this forces a new resource to be created.
    ldapsUrls List<String>
    A list of the LDAPS URLs to communicate with the Azure Active Directory. Changing this forces a new resource to be created.
    msiResourceId String
    The User Assigned Identity for the HDInsight Cluster. Changing this forces a new resource to be created.
    clusterUsersGroupDns List<String>
    A list of the distinguished names for the cluster user groups. Changing this forces a new resource to be created.
    aaddsResourceId string
    The resource ID of the Azure Active Directory Domain Service. Changing this forces a new resource to be created.
    domainName string
    The name of the Azure Active Directory Domain. Changing this forces a new resource to be created.
    domainUserPassword string
    The user password of the Azure Active Directory Domain. Changing this forces a new resource to be created.
    domainUsername string
    The username of the Azure Active Directory Domain. Changing this forces a new resource to be created.
    ldapsUrls string[]
    A list of the LDAPS URLs to communicate with the Azure Active Directory. Changing this forces a new resource to be created.
    msiResourceId string
    The User Assigned Identity for the HDInsight Cluster. Changing this forces a new resource to be created.
    clusterUsersGroupDns string[]
    A list of the distinguished names for the cluster user groups. Changing this forces a new resource to be created.
    aadds_resource_id str
    The resource ID of the Azure Active Directory Domain Service. Changing this forces a new resource to be created.
    domain_name str
    The name of the Azure Active Directory Domain. Changing this forces a new resource to be created.
    domain_user_password str
    The user password of the Azure Active Directory Domain. Changing this forces a new resource to be created.
    domain_username str
    The username of the Azure Active Directory Domain. Changing this forces a new resource to be created.
    ldaps_urls Sequence[str]
    A list of the LDAPS URLs to communicate with the Azure Active Directory. Changing this forces a new resource to be created.
    msi_resource_id str
    The User Assigned Identity for the HDInsight Cluster. Changing this forces a new resource to be created.
    cluster_users_group_dns Sequence[str]
    A list of the distinguished names for the cluster user groups. Changing this forces a new resource to be created.
    aaddsResourceId String
    The resource ID of the Azure Active Directory Domain Service. Changing this forces a new resource to be created.
    domainName String
    The name of the Azure Active Directory Domain. Changing this forces a new resource to be created.
    domainUserPassword String
    The user password of the Azure Active Directory Domain. Changing this forces a new resource to be created.
    domainUsername String
    The username of the Azure Active Directory Domain. Changing this forces a new resource to be created.
    ldapsUrls List<String>
    A list of the LDAPS URLs to communicate with the Azure Active Directory. Changing this forces a new resource to be created.
    msiResourceId String
    The User Assigned Identity for the HDInsight Cluster. Changing this forces a new resource to be created.
    clusterUsersGroupDns List<String>
    A list of the distinguished names for the cluster user groups. Changing this forces a new resource to be created.
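
    A sketch of a securityProfile block for a domain-joined cluster; every value below is a placeholder for an existing Azure Active Directory Domain Services deployment, domain account, and user-assigned identity in your environment:

    const securityProfile = {
        aaddsResourceId: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/example-resources/providers/Microsoft.AAD/domainServices/example.com",
        domainName: "example.com",
        domainUsername: "clusteradmin@example.com",
        domainUserPassword: "P@ssw0rd1234!",
        ldapsUrls: ["ldaps://example.com:636"],
        msiResourceId: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/example-resources/providers/Microsoft.ManagedIdentity/userAssignedIdentities/example-msi",
        clusterUsersGroupDns: ["hdinsight-cluster-admins"], // group identifiers as required by your directory
    };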

    SparkClusterStorageAccount, SparkClusterStorageAccountArgs

    IsDefault bool

    Is this the Default Storage Account for the HDInsight Spark Cluster? Changing this forces a new resource to be created.

    NOTE: One of the storage_account or storage_account_gen2 blocks must be marked as the default.

    StorageAccountKey string
    The Access Key which should be used to connect to the Storage Account. Changing this forces a new resource to be created.
    StorageContainerId string

    The ID of the Storage Container. Changing this forces a new resource to be created.

    NOTE: This can be obtained from the id of the azure.storage.Container resource.

    StorageResourceId string
    The ID of the Storage Account. Changing this forces a new resource to be created.
    IsDefault bool

    Is this the Default Storage Account for the HDInsight Spark Cluster? Changing this forces a new resource to be created.

    NOTE: One of the storage_account or storage_account_gen2 blocks must be marked as the default.

    StorageAccountKey string
    The Access Key which should be used to connect to the Storage Account. Changing this forces a new resource to be created.
    StorageContainerId string

    The ID of the Storage Container. Changing this forces a new resource to be created.

    NOTE: This can be obtained from the id of the azure.storage.Container resource.

    StorageResourceId string
    The ID of the Storage Account. Changing this forces a new resource to be created.
    isDefault Boolean

    Is this the Default Storage Account for the HDInsight Spark Cluster? Changing this forces a new resource to be created.

    NOTE: One of the storage_account or storage_account_gen2 blocks must be marked as the default.

    storageAccountKey String
    The Access Key which should be used to connect to the Storage Account. Changing this forces a new resource to be created.
    storageContainerId String

    The ID of the Storage Container. Changing this forces a new resource to be created.

    NOTE: This can be obtained from the id of the azure.storage.Container resource.

    storageResourceId String
    The ID of the Storage Account. Changing this forces a new resource to be created.
    isDefault boolean

    Is this the Default Storage Account for the HDInsight Spark Cluster? Changing this forces a new resource to be created.

    NOTE: One of the storage_account or storage_account_gen2 blocks must be marked as the default.

    storageAccountKey string
    The Access Key which should be used to connect to the Storage Account. Changing this forces a new resource to be created.
    storageContainerId string

    The ID of the Storage Container. Changing this forces a new resource to be created.

    NOTE: This can be obtained from the id of the azure.storage.Container resource.

    storageResourceId string
    The ID of the Storage Account. Changing this forces a new resource to be created.
    is_default bool

    Is this the Default Storage Account for the HDInsight Spark Cluster? Changing this forces a new resource to be created.

    NOTE: One of the storage_account or storage_account_gen2 blocks must be marked as the default.

    storage_account_key str
    The Access Key which should be used to connect to the Storage Account. Changing this forces a new resource to be created.
    storage_container_id str

    The ID of the Storage Container. Changing this forces a new resource to be created.

    NOTE: This can be obtained from the id of the azure.storage.Container resource.

    storage_resource_id str
    The ID of the Storage Account. Changing this forces a new resource to be created.
    isDefault Boolean

    Is this the Default Storage Account for the HDInsight Spark Cluster? Changing this forces a new resource to be created.

    NOTE: One of the storage_account or storage_account_gen2 blocks must be marked as the default.

    storageAccountKey String
    The Access Key which should be used to connect to the Storage Account. Changing this forces a new resource to be created.
    storageContainerId String

    The ID of the Storage Container. Changing this forces a new resource to be created.

    NOTE: This can be obtained from the id of the azure.storage.Container resource.

    storageResourceId String
    The ID of the Storage Account. Changing this forces a new resource to be created.

    SparkClusterStorageAccountGen2, SparkClusterStorageAccountGen2Args

    FilesystemId string
    The ID of the Gen2 Filesystem. Changing this forces a new resource to be created.
    IsDefault bool

    Is this the Default Storage Account for the HDInsight Spark Cluster? Changing this forces a new resource to be created.

    NOTE: One of the storage_account or storage_account_gen2 blocks must be marked as the default.

    ManagedIdentityResourceId string

    The ID of the Managed Identity to use for accessing the Gen2 filesystem. Changing this forces a new resource to be created.

    NOTE: This can be obtained from the id of the azure.storage.Container resource.

    StorageResourceId string
    The ID of the Storage Account. Changing this forces a new resource to be created.
    FilesystemId string
    The ID of the Gen2 Filesystem. Changing this forces a new resource to be created.
    IsDefault bool

    Is this the Default Storage Account for the HDInsight Spark Cluster? Changing this forces a new resource to be created.

    NOTE: One of the storage_account or storage_account_gen2 blocks must be marked as the default.

    ManagedIdentityResourceId string

    The ID of the Managed Identity to use for accessing the Gen2 filesystem. Changing this forces a new resource to be created.

    NOTE: This can be obtained from the id of the azure.storage.Container resource.

    StorageResourceId string
    The ID of the Storage Account. Changing this forces a new resource to be created.
    filesystemId String
    The ID of the Gen2 Filesystem. Changing this forces a new resource to be created.
    isDefault Boolean

    Is this the Default Storage Account for the HDInsight Spark Cluster? Changing this forces a new resource to be created.

    NOTE: One of the storage_account or storage_account_gen2 blocks must be marked as the default.

    managedIdentityResourceId String

    The ID of the Managed Identity to use for accessing the Gen2 filesystem. Changing this forces a new resource to be created.

    NOTE: This can be obtained from the id of the azure.storage.Container resource.

    storageResourceId String
    The ID of the Storage Account. Changing this forces a new resource to be created.
    filesystemId string
    The ID of the Gen2 Filesystem. Changing this forces a new resource to be created.
    isDefault boolean

    Is this the Default Storage Account for the HDInsight Spark Cluster? Changing this forces a new resource to be created.

    NOTE: One of the storage_account or storage_account_gen2 blocks must be marked as the default.

    managedIdentityResourceId string

    The ID of the Managed Identity to use for accessing the Gen2 filesystem. Changing this forces a new resource to be created.

    NOTE: This can be obtained from the id of the azure.storage.Container resource.

    storageResourceId string
    The ID of the Storage Account. Changing this forces a new resource to be created.
    filesystem_id str
    The ID of the Gen2 Filesystem. Changing this forces a new resource to be created.
    is_default bool

    Is this the Default Storage Account for the HDInsight Spark Cluster? Changing this forces a new resource to be created.

    NOTE: One of the storage_account or storage_account_gen2 blocks must be marked as the default.

    managed_identity_resource_id str

    The ID of the Managed Identity to use for accessing the Gen2 filesystem. Changing this forces a new resource to be created.

    NOTE: This can be obtained from the id of the azure.storage.Container resource.

    storage_resource_id str
    The ID of the Storage Account. Changing this forces a new resource to be created.
    filesystemId String
    The ID of the Gen2 Filesystem. Changing this forces a new resource to be created.
    isDefault Boolean

    Is this the Default Storage Account for the HDInsight Spark Cluster? Changing this forces a new resource to be created.

    NOTE: One of the storage_account or storage_account_gen2 blocks must be marked as the default.

    managedIdentityResourceId String

    The ID of the Managed Identity to use for accessing the Gen2 filesystem. Changing this forces a new resource to be created.

    NOTE: This can be obtained from the id of the azure.storage.Container resource.

    storageResourceId String
    The ID of the Storage Account. Changing this forces a new resource to be created.
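
    As a sketch (assuming a Data Lake Storage Gen2 filesystem, its storage account, and a user-assigned identity are defined elsewhere in the program, here as the hypothetical exampleGen2Filesystem, exampleGen2Account, and exampleIdentity), a storageAccountGen2 block could look like:

    const storageAccountGen2 = {
        isDefault: true,
        filesystemId: exampleGen2Filesystem.id,
        managedIdentityResourceId: exampleIdentity.id,
        storageResourceId: exampleGen2Account.id,
        // The identity typically also needs data-plane access (for example the
        // Storage Blob Data Owner role) on the storage account.
    };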

    Import

    HDInsight Spark Clusters can be imported using the resource id, e.g.

    $ pulumi import azure:hdinsight/sparkCluster:SparkCluster example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/mygroup1/providers/Microsoft.HDInsight/clusters/cluster1
    

    Package Details

    Repository
    Azure Classic pulumi/pulumi-azure
    License
    Apache-2.0
    Notes
    This Pulumi package is based on the azurerm Terraform Provider.