
azure-native.synapse.BigDataPool

This is the latest version of Azure Native. Use the Azure Native v1 docs if using the v1 version of this package.
Azure Native v2.9.0 published on Wednesday, Sep 27, 2023 by Pulumi

    A Big Data pool.

    Azure REST API version: 2021-06-01. Prior API version in Azure Native 1.x: 2021-03-01.

    Example Usage

    Create or update a Big Data pool

    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using AzureNative = Pulumi.AzureNative;
    
    return await Deployment.RunAsync(() => 
    {
        var bigDataPool = new AzureNative.Synapse.BigDataPool("bigDataPool", new()
        {
            AutoPause = new AzureNative.Synapse.Inputs.AutoPausePropertiesArgs
            {
                DelayInMinutes = 15,
                Enabled = true,
            },
            AutoScale = new AzureNative.Synapse.Inputs.AutoScalePropertiesArgs
            {
                Enabled = true,
                MaxNodeCount = 50,
                MinNodeCount = 3,
            },
            BigDataPoolName = "ExamplePool",
            DefaultSparkLogFolder = "/logs",
            IsAutotuneEnabled = false,
            LibraryRequirements = new AzureNative.Synapse.Inputs.LibraryRequirementsArgs
            {
                Content = "",
                Filename = "requirements.txt",
            },
            Location = "West US 2",
            NodeCount = 4,
            NodeSize = "Medium",
            NodeSizeFamily = "MemoryOptimized",
            ResourceGroupName = "ExampleResourceGroup",
            SparkEventsFolder = "/events",
            SparkVersion = "3.3",
            Tags = 
            {
                { "key", "value" },
            },
            WorkspaceName = "ExampleWorkspace",
        });
    
    });
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-azure-native-sdk/synapse/v2"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		_, err := synapse.NewBigDataPool(ctx, "bigDataPool", &synapse.BigDataPoolArgs{
    			AutoPause: &synapse.AutoPausePropertiesArgs{
    				DelayInMinutes: pulumi.Int(15),
    				Enabled:        pulumi.Bool(true),
    			},
    			AutoScale: &synapse.AutoScalePropertiesArgs{
    				Enabled:      pulumi.Bool(true),
    				MaxNodeCount: pulumi.Int(50),
    				MinNodeCount: pulumi.Int(3),
    			},
    			BigDataPoolName:       pulumi.String("ExamplePool"),
    			DefaultSparkLogFolder: pulumi.String("/logs"),
    			IsAutotuneEnabled:     pulumi.Bool(false),
    			LibraryRequirements: &synapse.LibraryRequirementsArgs{
    				Content:  pulumi.String(""),
    				Filename: pulumi.String("requirements.txt"),
    			},
    			Location:          pulumi.String("West US 2"),
    			NodeCount:         pulumi.Int(4),
    			NodeSize:          pulumi.String("Medium"),
    			NodeSizeFamily:    pulumi.String("MemoryOptimized"),
    			ResourceGroupName: pulumi.String("ExampleResourceGroup"),
    			SparkEventsFolder: pulumi.String("/events"),
    			SparkVersion:      pulumi.String("3.3"),
    			Tags: pulumi.StringMap{
    				"key": pulumi.String("value"),
    			},
    			WorkspaceName: pulumi.String("ExampleWorkspace"),
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.azurenative.synapse.BigDataPool;
    import com.pulumi.azurenative.synapse.BigDataPoolArgs;
    import com.pulumi.azurenative.synapse.inputs.AutoPausePropertiesArgs;
    import com.pulumi.azurenative.synapse.inputs.AutoScalePropertiesArgs;
    import com.pulumi.azurenative.synapse.inputs.LibraryRequirementsArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var bigDataPool = new BigDataPool("bigDataPool", BigDataPoolArgs.builder()
                .autoPause(AutoPausePropertiesArgs.builder()
                    .delayInMinutes(15)
                    .enabled(true)
                    .build())
                .autoScale(AutoScalePropertiesArgs.builder()
                    .enabled(true)
                    .maxNodeCount(50)
                    .minNodeCount(3)
                    .build())
                .bigDataPoolName("ExamplePool")
                .defaultSparkLogFolder("/logs")
                .isAutotuneEnabled(false)
                .libraryRequirements(LibraryRequirementsArgs.builder()
                    .content("")
                    .filename("requirements.txt")
                    .build())
                .location("West US 2")
                .nodeCount(4)
                .nodeSize("Medium")
                .nodeSizeFamily("MemoryOptimized")
                .resourceGroupName("ExampleResourceGroup")
                .sparkEventsFolder("/events")
                .sparkVersion("3.3")
                .tags(Map.of("key", "value"))
                .workspaceName("ExampleWorkspace")
                .build());
    
        }
    }
    
    import pulumi
    import pulumi_azure_native as azure_native
    
    big_data_pool = azure_native.synapse.BigDataPool("bigDataPool",
        auto_pause=azure_native.synapse.AutoPausePropertiesArgs(
            delay_in_minutes=15,
            enabled=True,
        ),
        auto_scale=azure_native.synapse.AutoScalePropertiesArgs(
            enabled=True,
            max_node_count=50,
            min_node_count=3,
        ),
        big_data_pool_name="ExamplePool",
        default_spark_log_folder="/logs",
        is_autotune_enabled=False,
        library_requirements=azure_native.synapse.LibraryRequirementsArgs(
            content="",
            filename="requirements.txt",
        ),
        location="West US 2",
        node_count=4,
        node_size="Medium",
        node_size_family="MemoryOptimized",
        resource_group_name="ExampleResourceGroup",
        spark_events_folder="/events",
        spark_version="3.3",
        tags={
            "key": "value",
        },
        workspace_name="ExampleWorkspace")
    
    import * as pulumi from "@pulumi/pulumi";
    import * as azure_native from "@pulumi/azure-native";
    
    const bigDataPool = new azure_native.synapse.BigDataPool("bigDataPool", {
        autoPause: {
            delayInMinutes: 15,
            enabled: true,
        },
        autoScale: {
            enabled: true,
            maxNodeCount: 50,
            minNodeCount: 3,
        },
        bigDataPoolName: "ExamplePool",
        defaultSparkLogFolder: "/logs",
        isAutotuneEnabled: false,
        libraryRequirements: {
            content: "",
            filename: "requirements.txt",
        },
        location: "West US 2",
        nodeCount: 4,
        nodeSize: "Medium",
        nodeSizeFamily: "MemoryOptimized",
        resourceGroupName: "ExampleResourceGroup",
        sparkEventsFolder: "/events",
        sparkVersion: "3.3",
        tags: {
            key: "value",
        },
        workspaceName: "ExampleWorkspace",
    });
    
    resources:
      bigDataPool:
        type: azure-native:synapse:BigDataPool
        properties:
          autoPause:
            delayInMinutes: 15
            enabled: true
          autoScale:
            enabled: true
            maxNodeCount: 50
            minNodeCount: 3
          bigDataPoolName: ExamplePool
          defaultSparkLogFolder: /logs
          isAutotuneEnabled: false
          libraryRequirements:
            content: ""
            filename: requirements.txt
          location: West US 2
          nodeCount: 4
          nodeSize: Medium
          nodeSizeFamily: MemoryOptimized
          resourceGroupName: ExampleResourceGroup
          sparkEventsFolder: /events
          sparkVersion: '3.3'
          tags:
            key: value
          workspaceName: ExampleWorkspace
    

    Create BigDataPool Resource

    new BigDataPool(name: string, args: BigDataPoolArgs, opts?: CustomResourceOptions);
    @overload
    def BigDataPool(resource_name: str,
                    opts: Optional[ResourceOptions] = None,
                    auto_pause: Optional[AutoPausePropertiesArgs] = None,
                    auto_scale: Optional[AutoScalePropertiesArgs] = None,
                    big_data_pool_name: Optional[str] = None,
                    custom_libraries: Optional[Sequence[LibraryInfoArgs]] = None,
                    default_spark_log_folder: Optional[str] = None,
                    dynamic_executor_allocation: Optional[DynamicExecutorAllocationArgs] = None,
                    force: Optional[bool] = None,
                    is_autotune_enabled: Optional[bool] = None,
                    is_compute_isolation_enabled: Optional[bool] = None,
                    library_requirements: Optional[LibraryRequirementsArgs] = None,
                    location: Optional[str] = None,
                    node_count: Optional[int] = None,
                    node_size: Optional[Union[str, NodeSize]] = None,
                    node_size_family: Optional[Union[str, NodeSizeFamily]] = None,
                    provisioning_state: Optional[str] = None,
                    resource_group_name: Optional[str] = None,
                    session_level_packages_enabled: Optional[bool] = None,
                    spark_config_properties: Optional[SparkConfigPropertiesArgs] = None,
                    spark_events_folder: Optional[str] = None,
                    spark_version: Optional[str] = None,
                    tags: Optional[Mapping[str, str]] = None,
                    workspace_name: Optional[str] = None)
    @overload
    def BigDataPool(resource_name: str,
                    args: BigDataPoolArgs,
                    opts: Optional[ResourceOptions] = None)
    func NewBigDataPool(ctx *Context, name string, args BigDataPoolArgs, opts ...ResourceOption) (*BigDataPool, error)
    public BigDataPool(string name, BigDataPoolArgs args, CustomResourceOptions? opts = null)
    public BigDataPool(String name, BigDataPoolArgs args)
    public BigDataPool(String name, BigDataPoolArgs args, CustomResourceOptions options)
    
    type: azure-native:synapse:BigDataPool
    properties: # The arguments to resource properties.
    options: # Bag of options to control resource's behavior.
    
    
    name string
    The unique name of the resource.
    args BigDataPoolArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    resource_name str
    The unique name of the resource.
    args BigDataPoolArgs
    The arguments to resource properties.
    opts ResourceOptions
    Bag of options to control resource's behavior.
    ctx Context
    Context object for the current deployment.
    name string
    The unique name of the resource.
    args BigDataPoolArgs
    The arguments to resource properties.
    opts ResourceOption
    Bag of options to control resource's behavior.
    name string
    The unique name of the resource.
    args BigDataPoolArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    name String
    The unique name of the resource.
    args BigDataPoolArgs
    The arguments to resource properties.
    options CustomResourceOptions
    Bag of options to control resource's behavior.
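
    As a reference point for the constructor shapes above, here is a minimal TypeScript sketch that passes only the resource group and workspace names together with a couple of standard Pulumi resource options. The argument values are illustrative, and the options shown (protect, ignoreChanges) are ordinary CustomResourceOptions rather than anything specific to this resource.

    import * as azure_native from "@pulumi/azure-native";

    // Pool sizing and Spark settings can be added exactly as in the Example Usage above.
    const pool = new azure_native.synapse.BigDataPool("examplePool", {
        resourceGroupName: "ExampleResourceGroup",
        workspaceName: "ExampleWorkspace",
    }, {
        protect: true,                // refuse to delete the pool unless it is unprotected first
        ignoreChanges: ["nodeCount"], // e.g. when auto-scale manages the node count
    });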

    BigDataPool Resource Properties

    To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

    Inputs

    The BigDataPool resource accepts the following input properties:

    ResourceGroupName string

    The name of the resource group. The name is case insensitive.

    WorkspaceName string

    The name of the workspace.

    AutoPause Pulumi.AzureNative.Synapse.Inputs.AutoPauseProperties

    Auto-pausing properties

    AutoScale Pulumi.AzureNative.Synapse.Inputs.AutoScaleProperties

    Auto-scaling properties

    BigDataPoolName string

    Big Data pool name

    CustomLibraries List<Pulumi.AzureNative.Synapse.Inputs.LibraryInfo>

    List of custom libraries/packages associated with the spark pool.

    DefaultSparkLogFolder string

    The default folder where Spark logs will be written.

    DynamicExecutorAllocation Pulumi.AzureNative.Synapse.Inputs.DynamicExecutorAllocation

    Dynamic Executor Allocation

    Force bool

    Whether to stop any running jobs in the Big Data pool

    IsAutotuneEnabled bool

    Whether autotune is required or not.

    IsComputeIsolationEnabled bool

    Whether compute isolation is required or not.

    LibraryRequirements Pulumi.AzureNative.Synapse.Inputs.LibraryRequirements

    Library version requirements

    Location string

    The geo-location where the resource lives

    NodeCount int

    The number of nodes in the Big Data pool.

    NodeSize string | Pulumi.AzureNative.Synapse.NodeSize

    The level of compute power that each node in the Big Data pool has.

    NodeSizeFamily string | Pulumi.AzureNative.Synapse.NodeSizeFamily

    The kind of nodes that the Big Data pool provides.

    ProvisioningState string

    The state of the Big Data pool.

    SessionLevelPackagesEnabled bool

    Whether session-level packages are enabled.

    SparkConfigProperties Pulumi.AzureNative.Synapse.Inputs.SparkConfigProperties

    Spark configuration file to specify additional properties

    SparkEventsFolder string

    The Spark events folder

    SparkVersion string

    The Apache Spark version.

    Tags Dictionary<string, string>

    Resource tags.

    ResourceGroupName string

    The name of the resource group. The name is case insensitive.

    WorkspaceName string

    The name of the workspace.

    AutoPause AutoPausePropertiesArgs

    Auto-pausing properties

    AutoScale AutoScalePropertiesArgs

    Auto-scaling properties

    BigDataPoolName string

    Big Data pool name

    CustomLibraries []LibraryInfoArgs

    List of custom libraries/packages associated with the spark pool.

    DefaultSparkLogFolder string

    The default folder where Spark logs will be written.

    DynamicExecutorAllocation DynamicExecutorAllocationArgs

    Dynamic Executor Allocation

    Force bool

    Whether to stop any running jobs in the Big Data pool

    IsAutotuneEnabled bool

    Whether autotune is required or not.

    IsComputeIsolationEnabled bool

    Whether compute isolation is required or not.

    LibraryRequirements LibraryRequirementsArgs

    Library version requirements

    Location string

    The geo-location where the resource lives

    NodeCount int

    The number of nodes in the Big Data pool.

    NodeSize string | NodeSize

    The level of compute power that each node in the Big Data pool has.

    NodeSizeFamily string | NodeSizeFamily

    The kind of nodes that the Big Data pool provides.

    ProvisioningState string

    The state of the Big Data pool.

    SessionLevelPackagesEnabled bool

    Whether session-level packages are enabled.

    SparkConfigProperties SparkConfigPropertiesArgs

    Spark configuration file to specify additional properties

    SparkEventsFolder string

    The Spark events folder

    SparkVersion string

    The Apache Spark version.

    Tags map[string]string

    Resource tags.

    resourceGroupName String

    The name of the resource group. The name is case insensitive.

    workspaceName String

    The name of the workspace.

    autoPause AutoPauseProperties

    Auto-pausing properties

    autoScale AutoScaleProperties

    Auto-scaling properties

    bigDataPoolName String

    Big Data pool name

    customLibraries List<LibraryInfo>

    List of custom libraries/packages associated with the spark pool.

    defaultSparkLogFolder String

    The default folder where Spark logs will be written.

    dynamicExecutorAllocation DynamicExecutorAllocation

    Dynamic Executor Allocation

    force Boolean

    Whether to stop any running jobs in the Big Data pool

    isAutotuneEnabled Boolean

    Whether autotune is required or not.

    isComputeIsolationEnabled Boolean

    Whether compute isolation is required or not.

    libraryRequirements LibraryRequirements

    Library version requirements

    location String

    The geo-location where the resource lives

    nodeCount Integer

    The number of nodes in the Big Data pool.

    nodeSize String | NodeSize

    The level of compute power that each node in the Big Data pool has.

    nodeSizeFamily String | NodeSizeFamily

    The kind of nodes that the Big Data pool provides.

    provisioningState String

    The state of the Big Data pool.

    sessionLevelPackagesEnabled Boolean

    Whether session-level packages are enabled.

    sparkConfigProperties SparkConfigProperties

    Spark configuration file to specify additional properties

    sparkEventsFolder String

    The Spark events folder

    sparkVersion String

    The Apache Spark version.

    tags Map<String,String>

    Resource tags.

    resourceGroupName string

    The name of the resource group. The name is case insensitive.

    workspaceName string

    The name of the workspace.

    autoPause AutoPauseProperties

    Auto-pausing properties

    autoScale AutoScaleProperties

    Auto-scaling properties

    bigDataPoolName string

    Big Data pool name

    customLibraries LibraryInfo[]

    List of custom libraries/packages associated with the spark pool.

    defaultSparkLogFolder string

    The default folder where Spark logs will be written.

    dynamicExecutorAllocation DynamicExecutorAllocation

    Dynamic Executor Allocation

    force boolean

    Whether to stop any running jobs in the Big Data pool

    isAutotuneEnabled boolean

    Whether autotune is required or not.

    isComputeIsolationEnabled boolean

    Whether compute isolation is required or not.

    libraryRequirements LibraryRequirements

    Library version requirements

    location string

    The geo-location where the resource lives

    nodeCount number

    The number of nodes in the Big Data pool.

    nodeSize string | NodeSize

    The level of compute power that each node in the Big Data pool has.

    nodeSizeFamily string | NodeSizeFamily

    The kind of nodes that the Big Data pool provides.

    provisioningState string

    The state of the Big Data pool.

    sessionLevelPackagesEnabled boolean

    Whether session-level packages are enabled.

    sparkConfigProperties SparkConfigProperties

    Spark configuration file to specify additional properties

    sparkEventsFolder string

    The Spark events folder

    sparkVersion string

    The Apache Spark version.

    tags {[key: string]: string}

    Resource tags.

    resource_group_name str

    The name of the resource group. The name is case insensitive.

    workspace_name str

    The name of the workspace.

    auto_pause AutoPausePropertiesArgs

    Auto-pausing properties

    auto_scale AutoScalePropertiesArgs

    Auto-scaling properties

    big_data_pool_name str

    Big Data pool name

    custom_libraries Sequence[LibraryInfoArgs]

    List of custom libraries/packages associated with the spark pool.

    default_spark_log_folder str

    The default folder where Spark logs will be written.

    dynamic_executor_allocation DynamicExecutorAllocationArgs

    Dynamic Executor Allocation

    force bool

    Whether to stop any running jobs in the Big Data pool

    is_autotune_enabled bool

    Whether autotune is required or not.

    is_compute_isolation_enabled bool

    Whether compute isolation is required or not.

    library_requirements LibraryRequirementsArgs

    Library version requirements

    location str

    The geo-location where the resource lives

    node_count int

    The number of nodes in the Big Data pool.

    node_size str | NodeSize

    The level of compute power that each node in the Big Data pool has.

    node_size_family str | NodeSizeFamily

    The kind of nodes that the Big Data pool provides.

    provisioning_state str

    The state of the Big Data pool.

    session_level_packages_enabled bool

    Whether session-level packages are enabled.

    spark_config_properties SparkConfigPropertiesArgs

    Spark configuration file to specify additional properties

    spark_events_folder str

    The Spark events folder

    spark_version str

    The Apache Spark version.

    tags Mapping[str, str]

    Resource tags.

    resourceGroupName String

    The name of the resource group. The name is case insensitive.

    workspaceName String

    The name of the workspace.

    autoPause Property Map

    Auto-pausing properties

    autoScale Property Map

    Auto-scaling properties

    bigDataPoolName String

    Big Data pool name

    customLibraries List<Property Map>

    List of custom libraries/packages associated with the spark pool.

    defaultSparkLogFolder String

    The default folder where Spark logs will be written.

    dynamicExecutorAllocation Property Map

    Dynamic Executor Allocation

    force Boolean

    Whether to stop any running jobs in the Big Data pool

    isAutotuneEnabled Boolean

    Whether autotune is required or not.

    isComputeIsolationEnabled Boolean

    Whether compute isolation is required or not.

    libraryRequirements Property Map

    Library version requirements

    location String

    The geo-location where the resource lives

    nodeCount Number

    The number of nodes in the Big Data pool.

    nodeSize String | "None" | "Small" | "Medium" | "Large" | "XLarge" | "XXLarge" | "XXXLarge"

    The level of compute power that each node in the Big Data pool has.

    nodeSizeFamily String | "None" | "MemoryOptimized" | "HardwareAcceleratedFPGA" | "HardwareAcceleratedGPU"

    The kind of nodes that the Big Data pool provides.

    provisioningState String

    The state of the Big Data pool.

    sessionLevelPackagesEnabled Boolean

    Whether session-level packages are enabled.

    sparkConfigProperties Property Map

    Spark configuration file to specify additional properties

    sparkEventsFolder String

    The Spark events folder

    sparkVersion String

    The Apache Spark version.

    tags Map<String>

    Resource tags.

    Outputs

    All input properties are implicitly available as output properties. Additionally, the BigDataPool resource produces the following output properties:

    CacheSize int

    The cache size

    CreationDate string

    The time when the Big Data pool was created.

    Id string

    The provider-assigned unique ID for this managed resource.

    LastSucceededTimestamp string

    The time when the Big Data pool was updated successfully.

    Name string

    The name of the resource

    Type string

    The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"

    CacheSize int

    The cache size

    CreationDate string

    The time when the Big Data pool was created.

    Id string

    The provider-assigned unique ID for this managed resource.

    LastSucceededTimestamp string

    The time when the Big Data pool was updated successfully.

    Name string

    The name of the resource

    Type string

    The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"

    cacheSize Integer

    The cache size

    creationDate String

    The time when the Big Data pool was created.

    id String

    The provider-assigned unique ID for this managed resource.

    lastSucceededTimestamp String

    The time when the Big Data pool was updated successfully.

    name String

    The name of the resource

    type String

    The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"

    cacheSize number

    The cache size

    creationDate string

    The time when the Big Data pool was created.

    id string

    The provider-assigned unique ID for this managed resource.

    lastSucceededTimestamp string

    The time when the Big Data pool was updated successfully.

    name string

    The name of the resource

    type string

    The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"

    cache_size int

    The cache size

    creation_date str

    The time when the Big Data pool was created.

    id str

    The provider-assigned unique ID for this managed resource.

    last_succeeded_timestamp str

    The time when the Big Data pool was updated successfully.

    name str

    The name of the resource

    type str

    The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"

    cacheSize Number

    The cache size

    creationDate String

    The time when the Big Data pool was created.

    id String

    The provider-assigned unique ID for this managed resource.

    lastSucceededTimestamp String

    The time when the Big Data pool was updated successfully.

    name String

    The name of the resource

    type String

    The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
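
    A short TypeScript sketch of how the output properties listed above are typically consumed; the pool is created with just the names from the Example Usage, and the stack exports are illustrative.

    import * as azure_native from "@pulumi/azure-native";

    const pool = new azure_native.synapse.BigDataPool("examplePool", {
        resourceGroupName: "ExampleResourceGroup",
        workspaceName: "ExampleWorkspace",
    });

    // Each output is a pulumi.Output<T>; export it from the stack or pass it
    // straight into another resource's inputs.
    export const poolId = pool.id;                         // provider-assigned ID
    export const poolCreated = pool.creationDate;          // creation timestamp
    export const poolLastSucceeded = pool.lastSucceededTimestamp;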

    Supporting Types

    AutoPauseProperties, AutoPausePropertiesArgs

    DelayInMinutes int

    Number of minutes of idle time before the Big Data pool is automatically paused.

    Enabled bool

    Whether auto-pausing is enabled for the Big Data pool.

    DelayInMinutes int

    Number of minutes of idle time before the Big Data pool is automatically paused.

    Enabled bool

    Whether auto-pausing is enabled for the Big Data pool.

    delayInMinutes Integer

    Number of minutes of idle time before the Big Data pool is automatically paused.

    enabled Boolean

    Whether auto-pausing is enabled for the Big Data pool.

    delayInMinutes number

    Number of minutes of idle time before the Big Data pool is automatically paused.

    enabled boolean

    Whether auto-pausing is enabled for the Big Data pool.

    delay_in_minutes int

    Number of minutes of idle time before the Big Data pool is automatically paused.

    enabled bool

    Whether auto-pausing is enabled for the Big Data pool.

    delayInMinutes Number

    Number of minutes of idle time before the Big Data pool is automatically paused.

    enabled Boolean

    Whether auto-pausing is enabled for the Big Data pool.

    AutoPausePropertiesResponse, AutoPausePropertiesResponseArgs

    DelayInMinutes int

    Number of minutes of idle time before the Big Data pool is automatically paused.

    Enabled bool

    Whether auto-pausing is enabled for the Big Data pool.

    DelayInMinutes int

    Number of minutes of idle time before the Big Data pool is automatically paused.

    Enabled bool

    Whether auto-pausing is enabled for the Big Data pool.

    delayInMinutes Integer

    Number of minutes of idle time before the Big Data pool is automatically paused.

    enabled Boolean

    Whether auto-pausing is enabled for the Big Data pool.

    delayInMinutes number

    Number of minutes of idle time before the Big Data pool is automatically paused.

    enabled boolean

    Whether auto-pausing is enabled for the Big Data pool.

    delay_in_minutes int

    Number of minutes of idle time before the Big Data pool is automatically paused.

    enabled bool

    Whether auto-pausing is enabled for the Big Data pool.

    delayInMinutes Number

    Number of minutes of idle time before the Big Data pool is automatically paused.

    enabled Boolean

    Whether auto-pausing is enabled for the Big Data pool.

    AutoScaleProperties, AutoScalePropertiesArgs

    Enabled bool

    Whether automatic scaling is enabled for the Big Data pool.

    MaxNodeCount int

    The maximum number of nodes the Big Data pool can support.

    MinNodeCount int

    The minimum number of nodes the Big Data pool can support.

    Enabled bool

    Whether automatic scaling is enabled for the Big Data pool.

    MaxNodeCount int

    The maximum number of nodes the Big Data pool can support.

    MinNodeCount int

    The minimum number of nodes the Big Data pool can support.

    enabled Boolean

    Whether automatic scaling is enabled for the Big Data pool.

    maxNodeCount Integer

    The maximum number of nodes the Big Data pool can support.

    minNodeCount Integer

    The minimum number of nodes the Big Data pool can support.

    enabled boolean

    Whether automatic scaling is enabled for the Big Data pool.

    maxNodeCount number

    The maximum number of nodes the Big Data pool can support.

    minNodeCount number

    The minimum number of nodes the Big Data pool can support.

    enabled bool

    Whether automatic scaling is enabled for the Big Data pool.

    max_node_count int

    The maximum number of nodes the Big Data pool can support.

    min_node_count int

    The minimum number of nodes the Big Data pool can support.

    enabled Boolean

    Whether automatic scaling is enabled for the Big Data pool.

    maxNodeCount Number

    The maximum number of nodes the Big Data pool can support.

    minNodeCount Number

    The minimum number of nodes the Big Data pool can support.

    AutoScalePropertiesResponse, AutoScalePropertiesResponseArgs

    Enabled bool

    Whether automatic scaling is enabled for the Big Data pool.

    MaxNodeCount int

    The maximum number of nodes the Big Data pool can support.

    MinNodeCount int

    The minimum number of nodes the Big Data pool can support.

    Enabled bool

    Whether automatic scaling is enabled for the Big Data pool.

    MaxNodeCount int

    The maximum number of nodes the Big Data pool can support.

    MinNodeCount int

    The minimum number of nodes the Big Data pool can support.

    enabled Boolean

    Whether automatic scaling is enabled for the Big Data pool.

    maxNodeCount Integer

    The maximum number of nodes the Big Data pool can support.

    minNodeCount Integer

    The minimum number of nodes the Big Data pool can support.

    enabled boolean

    Whether automatic scaling is enabled for the Big Data pool.

    maxNodeCount number

    The maximum number of nodes the Big Data pool can support.

    minNodeCount number

    The minimum number of nodes the Big Data pool can support.

    enabled bool

    Whether automatic scaling is enabled for the Big Data pool.

    max_node_count int

    The maximum number of nodes the Big Data pool can support.

    min_node_count int

    The minimum number of nodes the Big Data pool can support.

    enabled Boolean

    Whether automatic scaling is enabled for the Big Data pool.

    maxNodeCount Number

    The maximum number of nodes the Big Data pool can support.

    minNodeCount Number

    The minimum number of nodes the Big Data pool can support.

    ConfigurationType, ConfigurationTypeArgs

    File
    File
    Artifact
    Artifact
    ConfigurationTypeFile
    File
    ConfigurationTypeArtifact
    Artifact
    File
    File
    Artifact
    Artifact
    File
    File
    Artifact
    Artifact
    FILE
    File
    ARTIFACT
    Artifact
    "File"
    File
    "Artifact"
    Artifact

    DynamicExecutorAllocation, DynamicExecutorAllocationArgs

    Enabled bool

    Indicates whether Dynamic Executor Allocation is enabled or not.

    MaxExecutors int

    The maximum number of executors allotted

    MinExecutors int

    The minimum number of executors allotted

    Enabled bool

    Indicates whether Dynamic Executor Allocation is enabled or not.

    MaxExecutors int

    The maximum number of executors allotted

    MinExecutors int

    The minimum number of executors allotted

    enabled Boolean

    Indicates whether Dynamic Executor Allocation is enabled or not.

    maxExecutors Integer

    The maximum number of executors allotted

    minExecutors Integer

    The minimum number of executors allotted

    enabled boolean

    Indicates whether Dynamic Executor Allocation is enabled or not.

    maxExecutors number

    The maximum number of executors allotted

    minExecutors number

    The minimum number of executors allotted

    enabled bool

    Indicates whether Dynamic Executor Allocation is enabled or not.

    max_executors int

    The maximum number of executors allotted

    min_executors int

    The minimum number of executors allotted

    enabled Boolean

    Indicates whether Dynamic Executor Allocation is enabled or not.

    maxExecutors Number

    The maximum number of executors allotted

    minExecutors Number

    The minimum number of executors allotted
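
    The Example Usage above does not set dynamicExecutorAllocation; the TypeScript sketch below shows what that input could look like, using the properties documented in this section. The executor counts are illustrative, not recommendations.

    import * as azure_native from "@pulumi/azure-native";

    // Sketch: enable dynamic executor allocation alongside the usual pool settings.
    const poolWithDynamicAllocation = new azure_native.synapse.BigDataPool("examplePool", {
        resourceGroupName: "ExampleResourceGroup",
        workspaceName: "ExampleWorkspace",
        dynamicExecutorAllocation: {
            enabled: true,
            minExecutors: 1, // lower bound handed to each Spark session
            maxExecutors: 8, // upper bound; keep within the pool's node capacity
        },
    });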

    DynamicExecutorAllocationResponse, DynamicExecutorAllocationResponseArgs

    Enabled bool

    Indicates whether Dynamic Executor Allocation is enabled or not.

    MaxExecutors int

    The maximum number of executors allotted

    MinExecutors int

    The minimum number of executors allotted

    Enabled bool

    Indicates whether Dynamic Executor Allocation is enabled or not.

    MaxExecutors int

    The maximum number of executors allotted

    MinExecutors int

    The minimum number of executors allotted

    enabled Boolean

    Indicates whether Dynamic Executor Allocation is enabled or not.

    maxExecutors Integer

    The maximum number of executors allotted

    minExecutors Integer

    The minimum number of executors allotted

    enabled boolean

    Indicates whether Dynamic Executor Allocation is enabled or not.

    maxExecutors number

    The maximum number of executors allotted

    minExecutors number

    The minimum number of executors allotted

    enabled bool

    Indicates whether Dynamic Executor Allocation is enabled or not.

    max_executors int

    The maximum number of executors allotted

    min_executors int

    The minimum number of executors allotted

    enabled Boolean

    Indicates whether Dynamic Executor Allocation is enabled or not.

    maxExecutors Number

    The maximum number of executors allotted

    minExecutors Number

    The minimum number of executors allotted

    LibraryInfo, LibraryInfoArgs

    ContainerName string

    Storage blob container name.

    Name string

    Name of the library.

    Path string

    Storage blob path of library.

    Type string

    Type of the library.

    ContainerName string

    Storage blob container name.

    Name string

    Name of the library.

    Path string

    Storage blob path of library.

    Type string

    Type of the library.

    containerName String

    Storage blob container name.

    name String

    Name of the library.

    path String

    Storage blob path of library.

    type String

    Type of the library.

    containerName string

    Storage blob container name.

    name string

    Name of the library.

    path string

    Storage blob path of library.

    type string

    Type of the library.

    container_name str

    Storage blob container name.

    name str

    Name of the library.

    path str

    Storage blob path of library.

    type str

    Type of the library.

    containerName String

    Storage blob container name.

    name String

    Name of the library.

    path String

    Storage blob path of library.

    type String

    Type of the library.
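
    customLibraries also does not appear in the Example Usage; the TypeScript sketch below attaches one workspace package using the LibraryInfo fields documented here. It assumes the package has already been uploaded to the workspace, and the name, type, container, and path values are placeholders.

    import * as azure_native from "@pulumi/azure-native";

    const poolWithCustomLibraries = new azure_native.synapse.BigDataPool("examplePool", {
        resourceGroupName: "ExampleResourceGroup",
        workspaceName: "ExampleWorkspace",
        customLibraries: [{
            name: "my_helpers-0.1.0-py3-none-any.whl", // placeholder package name
            type: "whl",                               // library type, passed as a plain string
            containerName: "prep",                     // placeholder storage container
            path: "ExampleWorkspace/libraries/my_helpers-0.1.0-py3-none-any.whl",
        }],
    });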

    LibraryInfoResponse, LibraryInfoResponseArgs

    CreatorId string

    Creator Id of the library/package.

    ProvisioningStatus string

    Provisioning status of the library/package.

    UploadedTimestamp string

    The last update time of the library.

    ContainerName string

    Storage blob container name.

    Name string

    Name of the library.

    Path string

    Storage blob path of library.

    Type string

    Type of the library.

    CreatorId string

    Creator Id of the library/package.

    ProvisioningStatus string

    Provisioning status of the library/package.

    UploadedTimestamp string

    The last update time of the library.

    ContainerName string

    Storage blob container name.

    Name string

    Name of the library.

    Path string

    Storage blob path of library.

    Type string

    Type of the library.

    creatorId String

    Creator Id of the library/package.

    provisioningStatus String

    Provisioning status of the library/package.

    uploadedTimestamp String

    The last update time of the library.

    containerName String

    Storage blob container name.

    name String

    Name of the library.

    path String

    Storage blob path of library.

    type String

    Type of the library.

    creatorId string

    Creator Id of the library/package.

    provisioningStatus string

    Provisioning status of the library/package.

    uploadedTimestamp string

    The last update time of the library.

    containerName string

    Storage blob container name.

    name string

    Name of the library.

    path string

    Storage blob path of library.

    type string

    Type of the library.

    creator_id str

    Creator Id of the library/package.

    provisioning_status str

    Provisioning status of the library/package.

    uploaded_timestamp str

    The last update time of the library.

    container_name str

    Storage blob container name.

    name str

    Name of the library.

    path str

    Storage blob path of library.

    type str

    Type of the library.

    creatorId String

    Creator Id of the library/package.

    provisioningStatus String

    Provisioning status of the library/package.

    uploadedTimestamp String

    The last update time of the library.

    containerName String

    Storage blob container name.

    name String

    Name of the library.

    path String

    Storage blob path of library.

    type String

    Type of the library.

    LibraryRequirements, LibraryRequirementsArgs

    Content string

    The library requirements.

    Filename string

    The filename of the library requirements file.

    Content string

    The library requirements.

    Filename string

    The filename of the library requirements file.

    content String

    The library requirements.

    filename String

    The filename of the library requirements file.

    content string

    The library requirements.

    filename string

    The filename of the library requirements file.

    content str

    The library requirements.

    filename str

    The filename of the library requirements file.

    content String

    The library requirements.

    filename String

    The filename of the library requirements file.

    LibraryRequirementsResponse, LibraryRequirementsResponseArgs

    Time string

    The last update time of the library requirements file.

    Content string

    The library requirements.

    Filename string

    The filename of the library requirements file.

    Time string

    The last update time of the library requirements file.

    Content string

    The library requirements.

    Filename string

    The filename of the library requirements file.

    time String

    The last update time of the library requirements file.

    content String

    The library requirements.

    filename String

    The filename of the library requirements file.

    time string

    The last update time of the library requirements file.

    content string

    The library requirements.

    filename string

    The filename of the library requirements file.

    time str

    The last update time of the library requirements file.

    content str

    The library requirements.

    filename str

    The filename of the library requirements file.

    time String

    The last update time of the library requirements file.

    content String

    The library requirements.

    filename String

    The filename of the library requirements file.

    NodeSize, NodeSizeArgs

    None
    None
    Small
    Small
    Medium
    Medium
    Large
    Large
    XLarge
    XLarge
    XXLarge
    XXLarge
    XXXLarge
    XXXLarge
    NodeSizeNone
    None
    NodeSizeSmall
    Small
    NodeSizeMedium
    Medium
    NodeSizeLarge
    Large
    NodeSizeXLarge
    XLarge
    NodeSizeXXLarge
    XXLarge
    NodeSizeXXXLarge
    XXXLarge
    None
    None
    Small
    Small
    Medium
    Medium
    Large
    Large
    XLarge
    XLarge
    XXLarge
    XXLarge
    XXXLarge
    XXXLarge
    None
    None
    Small
    Small
    Medium
    Medium
    Large
    Large
    XLarge
    XLarge
    XXLarge
    XXLarge
    XXXLarge
    XXXLarge
    NONE
    None
    SMALL
    Small
    MEDIUM
    Medium
    LARGE
    Large
    X_LARGE
    XLarge
    XX_LARGE
    XXLarge
    XXX_LARGE
    XXXLarge
    "None"
    None
    "Small"
    Small
    "Medium"
    Medium
    "Large"
    Large
    "XLarge"
    XLarge
    "XXLarge"
    XXLarge
    "XXXLarge"
    XXXLarge

    NodeSizeFamily, NodeSizeFamilyArgs

    None
    None
    MemoryOptimized
    MemoryOptimized
    HardwareAcceleratedFPGA
    HardwareAcceleratedFPGA
    HardwareAcceleratedGPU
    HardwareAcceleratedGPU
    NodeSizeFamilyNone
    None
    NodeSizeFamilyMemoryOptimized
    MemoryOptimized
    NodeSizeFamilyHardwareAcceleratedFPGA
    HardwareAcceleratedFPGA
    NodeSizeFamilyHardwareAcceleratedGPU
    HardwareAcceleratedGPU
    None
    None
    MemoryOptimized
    MemoryOptimized
    HardwareAcceleratedFPGA
    HardwareAcceleratedFPGA
    HardwareAcceleratedGPU
    HardwareAcceleratedGPU
    None
    None
    MemoryOptimized
    MemoryOptimized
    HardwareAcceleratedFPGA
    HardwareAcceleratedFPGA
    HardwareAcceleratedGPU
    HardwareAcceleratedGPU
    NONE
    None
    MEMORY_OPTIMIZED
    MemoryOptimized
    HARDWARE_ACCELERATED_FPGA
    HardwareAcceleratedFPGA
    HARDWARE_ACCELERATED_GPU
    HardwareAcceleratedGPU
    "None"
    None
    "MemoryOptimized"
    MemoryOptimized
    "HardwareAcceleratedFPGA"
    HardwareAcceleratedFPGA
    "HardwareAcceleratedGPU"
    HardwareAcceleratedGPU

    SparkConfigProperties, SparkConfigPropertiesArgs

    ConfigurationType string | Pulumi.AzureNative.Synapse.ConfigurationType

    The type of the spark config properties file.

    Content string

    The spark config properties.

    Filename string

    The filename of the spark config properties file.

    ConfigurationType string | ConfigurationType

    The type of the spark config properties file.

    Content string

    The spark config properties.

    Filename string

    The filename of the spark config properties file.

    configurationType String | ConfigurationType

    The type of the spark config properties file.

    content String

    The spark config properties.

    filename String

    The filename of the spark config properties file.

    configurationType string | ConfigurationType

    The type of the spark config properties file.

    content string

    The spark config properties.

    filename string

    The filename of the spark config properties file.

    configuration_type str | ConfigurationType

    The type of the spark config properties file.

    content str

    The spark config properties.

    filename str

    The filename of the spark config properties file.

    configurationType String | "File" | "Artifact"

    The type of the spark config properties file.

    content String

    The spark config properties.

    filename String

    The filename of the spark config properties file.
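
    Similarly, sparkConfigProperties is not shown in the Example Usage; the TypeScript sketch below passes a small inline Spark configuration file using the fields documented in this section. The configuration keys and values are illustrative only.

    import * as azure_native from "@pulumi/azure-native";

    const poolWithSparkConfig = new azure_native.synapse.BigDataPool("examplePool", {
        resourceGroupName: "ExampleResourceGroup",
        workspaceName: "ExampleWorkspace",
        sparkConfigProperties: {
            configurationType: "File", // or "Artifact"; see ConfigurationType above
            filename: "spark_config.txt",
            content: "spark.sql.shuffle.partitions 16\nspark.dynamicAllocation.enabled false",
        },
    });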

    SparkConfigPropertiesResponse, SparkConfigPropertiesResponseArgs

    Time string

    The last update time of the spark config properties file.

    ConfigurationType string

    The type of the spark config properties file.

    Content string

    The spark config properties.

    Filename string

    The filename of the spark config properties file.

    Time string

    The last update time of the spark config properties file.

    ConfigurationType string

    The type of the spark config properties file.

    Content string

    The spark config properties.

    Filename string

    The filename of the spark config properties file.

    time String

    The last update time of the spark config properties file.

    configurationType String

    The type of the spark config properties file.

    content String

    The spark config properties.

    filename String

    The filename of the spark config properties file.

    time string

    The last update time of the spark config properties file.

    configurationType string

    The type of the spark config properties file.

    content string

    The spark config properties.

    filename string

    The filename of the spark config properties file.

    time str

    The last update time of the spark config properties file.

    configuration_type str

    The type of the spark config properties file.

    content str

    The spark config properties.

    filename str

    The filename of the spark config properties file.

    time String

    The last update time of the spark config properties file.

    configurationType String

    The type of the spark config properties file.

    content String

    The spark config properties.

    filename String

    The filename of the spark config properties file.

    Import

    An existing resource can be imported using its type token, name, and identifier, e.g.

    $ pulumi import azure-native:synapse:BigDataPool ExamplePool /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces/{workspaceName}/bigDataPools/{bigDataPoolName} 
    

    Package Details

    Repository
    Azure Native pulumi/pulumi-azure-native
    License
    Apache-2.0