azure-native.synapse.BigDataPool

A Big Data pool.

API Version: 2021-03-01.

Example Usage

Create or update a Big Data pool

C#

using System.Collections.Generic;
using Pulumi;
using AzureNative = Pulumi.AzureNative;

return await Deployment.RunAsync(() => 
{
    var bigDataPool = new AzureNative.Synapse.BigDataPool("bigDataPool", new()
    {
        AutoPause = new AzureNative.Synapse.Inputs.AutoPausePropertiesArgs
        {
            DelayInMinutes = 15,
            Enabled = true,
        },
        AutoScale = new AzureNative.Synapse.Inputs.AutoScalePropertiesArgs
        {
            Enabled = true,
            MaxNodeCount = 50,
            MinNodeCount = 3,
        },
        BigDataPoolName = "ExamplePool",
        DefaultSparkLogFolder = "/logs",
        LibraryRequirements = new AzureNative.Synapse.Inputs.LibraryRequirementsArgs
        {
            Content = "",
            Filename = "requirements.txt",
        },
        Location = "West US 2",
        NodeCount = 4,
        NodeSize = "Medium",
        NodeSizeFamily = "MemoryOptimized",
        ResourceGroupName = "ExampleResourceGroup",
        SparkEventsFolder = "/events",
        SparkVersion = "3.3",
        Tags = 
        {
            { "key", "value" },
        },
        WorkspaceName = "ExampleWorkspace",
    });

});

Go

package main

import (
	synapse "github.com/pulumi/pulumi-azure-native/sdk/go/azure/synapse"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := synapse.NewBigDataPool(ctx, "bigDataPool", &synapse.BigDataPoolArgs{
			AutoPause: &synapse.AutoPausePropertiesArgs{
				DelayInMinutes: pulumi.Int(15),
				Enabled:        pulumi.Bool(true),
			},
			AutoScale: &synapse.AutoScalePropertiesArgs{
				Enabled:      pulumi.Bool(true),
				MaxNodeCount: pulumi.Int(50),
				MinNodeCount: pulumi.Int(3),
			},
			BigDataPoolName:       pulumi.String("ExamplePool"),
			DefaultSparkLogFolder: pulumi.String("/logs"),
			LibraryRequirements: &synapse.LibraryRequirementsArgs{
				Content:  pulumi.String(""),
				Filename: pulumi.String("requirements.txt"),
			},
			Location:          pulumi.String("West US 2"),
			NodeCount:         pulumi.Int(4),
			NodeSize:          pulumi.String("Medium"),
			NodeSizeFamily:    pulumi.String("MemoryOptimized"),
			ResourceGroupName: pulumi.String("ExampleResourceGroup"),
			SparkEventsFolder: pulumi.String("/events"),
			SparkVersion:      pulumi.String("3.3"),
			Tags: pulumi.StringMap{
				"key": pulumi.String("value"),
			},
			WorkspaceName: pulumi.String("ExampleWorkspace"),
		})
		if err != nil {
			return err
		}
		return nil
	})
}

Java

package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.azurenative.synapse.BigDataPool;
import com.pulumi.azurenative.synapse.BigDataPoolArgs;
import com.pulumi.azurenative.synapse.inputs.AutoPausePropertiesArgs;
import com.pulumi.azurenative.synapse.inputs.AutoScalePropertiesArgs;
import com.pulumi.azurenative.synapse.inputs.LibraryRequirementsArgs;
import java.util.Map;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var bigDataPool = new BigDataPool("bigDataPool", BigDataPoolArgs.builder()
            .autoPause(AutoPausePropertiesArgs.builder()
                .delayInMinutes(15)
                .enabled(true)
                .build())
            .autoScale(AutoScalePropertiesArgs.builder()
                .enabled(true)
                .maxNodeCount(50)
                .minNodeCount(3)
                .build())
            .bigDataPoolName("ExamplePool")
            .defaultSparkLogFolder("/logs")
            .libraryRequirements(LibraryRequirementsArgs.builder()
                .content("")
                .filename("requirements.txt")
                .build())
            .location("West US 2")
            .nodeCount(4)
            .nodeSize("Medium")
            .nodeSizeFamily("MemoryOptimized")
            .resourceGroupName("ExampleResourceGroup")
            .sparkEventsFolder("/events")
            .sparkVersion("3.3")
            .tags(Map.of("key", "value"))
            .workspaceName("ExampleWorkspace")
            .build());
    }
}

Python

import pulumi
import pulumi_azure_native as azure_native

big_data_pool = azure_native.synapse.BigDataPool("bigDataPool",
    auto_pause=azure_native.synapse.AutoPausePropertiesArgs(
        delay_in_minutes=15,
        enabled=True,
    ),
    auto_scale=azure_native.synapse.AutoScalePropertiesArgs(
        enabled=True,
        max_node_count=50,
        min_node_count=3,
    ),
    big_data_pool_name="ExamplePool",
    default_spark_log_folder="/logs",
    library_requirements=azure_native.synapse.LibraryRequirementsArgs(
        content="",
        filename="requirements.txt",
    ),
    location="West US 2",
    node_count=4,
    node_size="Medium",
    node_size_family="MemoryOptimized",
    resource_group_name="ExampleResourceGroup",
    spark_events_folder="/events",
    spark_version="3.3",
    tags={
        "key": "value",
    },
    workspace_name="ExampleWorkspace")

TypeScript

import * as pulumi from "@pulumi/pulumi";
import * as azure_native from "@pulumi/azure-native";

const bigDataPool = new azure_native.synapse.BigDataPool("bigDataPool", {
    autoPause: {
        delayInMinutes: 15,
        enabled: true,
    },
    autoScale: {
        enabled: true,
        maxNodeCount: 50,
        minNodeCount: 3,
    },
    bigDataPoolName: "ExamplePool",
    defaultSparkLogFolder: "/logs",
    libraryRequirements: {
        content: "",
        filename: "requirements.txt",
    },
    location: "West US 2",
    nodeCount: 4,
    nodeSize: "Medium",
    nodeSizeFamily: "MemoryOptimized",
    resourceGroupName: "ExampleResourceGroup",
    sparkEventsFolder: "/events",
    sparkVersion: "3.3",
    tags: {
        key: "value",
    },
    workspaceName: "ExampleWorkspace",
});

YAML

resources:
  bigDataPool:
    type: azure-native:synapse:BigDataPool
    properties:
      autoPause:
        delayInMinutes: 15
        enabled: true
      autoScale:
        enabled: true
        maxNodeCount: 50
        minNodeCount: 3
      bigDataPoolName: ExamplePool
      defaultSparkLogFolder: /logs
      libraryRequirements:
        content: ""
        filename: requirements.txt
      location: West US 2
      nodeCount: 4
      nodeSize: Medium
      nodeSizeFamily: MemoryOptimized
      resourceGroupName: ExampleResourceGroup
      sparkEventsFolder: /events
      sparkVersion: '3.3'
      tags:
        key: value
      workspaceName: ExampleWorkspace

Create BigDataPool Resource

new BigDataPool(name: string, args: BigDataPoolArgs, opts?: CustomResourceOptions);
@overload
def BigDataPool(resource_name: str,
                opts: Optional[ResourceOptions] = None,
                auto_pause: Optional[AutoPausePropertiesArgs] = None,
                auto_scale: Optional[AutoScalePropertiesArgs] = None,
                big_data_pool_name: Optional[str] = None,
                cache_size: Optional[int] = None,
                creation_date: Optional[str] = None,
                custom_libraries: Optional[Sequence[LibraryInfoArgs]] = None,
                default_spark_log_folder: Optional[str] = None,
                dynamic_executor_allocation: Optional[DynamicExecutorAllocationArgs] = None,
                force: Optional[bool] = None,
                is_compute_isolation_enabled: Optional[bool] = None,
                library_requirements: Optional[LibraryRequirementsArgs] = None,
                location: Optional[str] = None,
                node_count: Optional[int] = None,
                node_size: Optional[Union[str, NodeSize]] = None,
                node_size_family: Optional[Union[str, NodeSizeFamily]] = None,
                provisioning_state: Optional[str] = None,
                resource_group_name: Optional[str] = None,
                session_level_packages_enabled: Optional[bool] = None,
                spark_config_properties: Optional[LibraryRequirementsArgs] = None,
                spark_events_folder: Optional[str] = None,
                spark_version: Optional[str] = None,
                tags: Optional[Mapping[str, str]] = None,
                workspace_name: Optional[str] = None)
@overload
def BigDataPool(resource_name: str,
                args: BigDataPoolArgs,
                opts: Optional[ResourceOptions] = None)
func NewBigDataPool(ctx *Context, name string, args BigDataPoolArgs, opts ...ResourceOption) (*BigDataPool, error)
public BigDataPool(string name, BigDataPoolArgs args, CustomResourceOptions? opts = null)
public BigDataPool(String name, BigDataPoolArgs args)
public BigDataPool(String name, BigDataPoolArgs args, CustomResourceOptions options)
type: azure-native:synapse:BigDataPool
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.

name string
The unique name of the resource.
args BigDataPoolArgs
The arguments to resource properties.
opts CustomResourceOptions
Bag of options to control resource's behavior.
resource_name str
The unique name of the resource.
args BigDataPoolArgs
The arguments to resource properties.
opts ResourceOptions
Bag of options to control resource's behavior.
ctx Context
Context object for the current deployment.
name string
The unique name of the resource.
args BigDataPoolArgs
The arguments to resource properties.
opts ResourceOption
Bag of options to control resource's behavior.
name string
The unique name of the resource.
args BigDataPoolArgs
The arguments to resource properties.
opts CustomResourceOptions
Bag of options to control resource's behavior.
name String
The unique name of the resource.
args BigDataPoolArgs
The arguments to resource properties.
options CustomResourceOptions
Bag of options to control resource's behavior.
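
As a minimal sketch in TypeScript: only resourceGroupName and workspaceName are required arguments, and opts is optional (the resource option shown is illustrative, not required):

import * as pulumi from "@pulumi/pulumi";
import * as azure_native from "@pulumi/azure-native";

// Minimal constructor call: logical name, required args, optional resource options.
const minimalPool = new azure_native.synapse.BigDataPool("minimalPool", {
    resourceGroupName: "ExampleResourceGroup",
    workspaceName: "ExampleWorkspace",
}, {
    protect: false, // any CustomResourceOptions may be passed here
});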

BigDataPool Resource Properties

To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

Inputs

The BigDataPool resource accepts the following input properties:

ResourceGroupName string

The name of the resource group. The name is case insensitive.

WorkspaceName string

The name of the workspace

AutoPause Pulumi.AzureNative.Synapse.Inputs.AutoPausePropertiesArgs

Auto-pausing properties

AutoScale Pulumi.AzureNative.Synapse.Inputs.AutoScalePropertiesArgs

Auto-scaling properties

BigDataPoolName string

Big Data pool name

CacheSize int

The cache size

CreationDate string

The time when the Big Data pool was created.

CustomLibraries List<Pulumi.AzureNative.Synapse.Inputs.LibraryInfoArgs>

List of custom libraries/packages associated with the spark pool.

DefaultSparkLogFolder string

The default folder where Spark logs will be written.

DynamicExecutorAllocation Pulumi.AzureNative.Synapse.Inputs.DynamicExecutorAllocationArgs

Dynamic Executor Allocation

Force bool

Whether to stop any running jobs in the Big Data pool

IsComputeIsolationEnabled bool

Whether compute isolation is required or not.

LibraryRequirements Pulumi.AzureNative.Synapse.Inputs.LibraryRequirementsArgs

Library version requirements

Location string

The geo-location where the resource lives

NodeCount int

The number of nodes in the Big Data pool.

NodeSize string | Pulumi.AzureNative.Synapse.NodeSize

The level of compute power that each node in the Big Data pool has.

NodeSizeFamily string | Pulumi.AzureNative.Synapse.NodeSizeFamily

The kind of nodes that the Big Data pool provides.

ProvisioningState string

The state of the Big Data pool.

SessionLevelPackagesEnabled bool

Whether session-level packages are enabled.

SparkConfigProperties Pulumi.AzureNative.Synapse.Inputs.LibraryRequirementsArgs

Spark configuration file to specify additional properties

SparkEventsFolder string

The Spark events folder

SparkVersion string

The Apache Spark version.

Tags Dictionary<string, string>

Resource tags.

ResourceGroupName string

The name of the resource group. The name is case insensitive.

WorkspaceName string

The name of the workspace

AutoPause AutoPausePropertiesArgs

Auto-pausing properties

AutoScale AutoScalePropertiesArgs

Auto-scaling properties

BigDataPoolName string

Big Data pool name

CacheSize int

The cache size

CreationDate string

The time when the Big Data pool was created.

CustomLibraries []LibraryInfoArgs

List of custom libraries/packages associated with the spark pool.

DefaultSparkLogFolder string

The default folder where Spark logs will be written.

DynamicExecutorAllocation DynamicExecutorAllocationArgs

Dynamic Executor Allocation

Force bool

Whether to stop any running jobs in the Big Data pool

IsComputeIsolationEnabled bool

Whether compute isolation is required or not.

LibraryRequirements LibraryRequirementsArgs

Library version requirements

Location string

The geo-location where the resource lives

NodeCount int

The number of nodes in the Big Data pool.

NodeSize string | NodeSize

The level of compute power that each node in the Big Data pool has.

NodeSizeFamily string | NodeSizeFamily

The kind of nodes that the Big Data pool provides.

ProvisioningState string

The state of the Big Data pool.

SessionLevelPackagesEnabled bool

Whether session-level packages are enabled.

SparkConfigProperties LibraryRequirementsArgs

Spark configuration file to specify additional properties

SparkEventsFolder string

The Spark events folder

SparkVersion string

The Apache Spark version.

Tags map[string]string

Resource tags.

resourceGroupName String

The name of the resource group. The name is case insensitive.

workspaceName String

The name of the workspace

autoPause AutoPausePropertiesArgs

Auto-pausing properties

autoScale AutoScalePropertiesArgs

Auto-scaling properties

bigDataPoolName String

Big Data pool name

cacheSize Integer

The cache size

creationDate String

The time when the Big Data pool was created.

customLibraries List<LibraryInfoArgs>

List of custom libraries/packages associated with the spark pool.

defaultSparkLogFolder String

The default folder where Spark logs will be written.

dynamicExecutorAllocation DynamicExecutorAllocationArgs

Dynamic Executor Allocation

force Boolean

Whether to stop any running jobs in the Big Data pool

isComputeIsolationEnabled Boolean

Whether compute isolation is required or not.

libraryRequirements LibraryRequirementsArgs

Library version requirements

location String

The geo-location where the resource lives

nodeCount Integer

The number of nodes in the Big Data pool.

nodeSize String | NodeSize

The level of compute power that each node in the Big Data pool has.

nodeSizeFamily String | NodeSizeFamily

The kind of nodes that the Big Data pool provides.

provisioningState String

The state of the Big Data pool.

sessionLevelPackagesEnabled Boolean

Whether session-level packages are enabled.

sparkConfigProperties LibraryRequirementsArgs

Spark configuration file to specify additional properties

sparkEventsFolder String

The Spark events folder

sparkVersion String

The Apache Spark version.

tags Map<String,String>

Resource tags.

resourceGroupName string

The name of the resource group. The name is case insensitive.

workspaceName string

The name of the workspace

autoPause AutoPausePropertiesArgs

Auto-pausing properties

autoScale AutoScalePropertiesArgs

Auto-scaling properties

bigDataPoolName string

Big Data pool name

cacheSize number

The cache size

creationDate string

The time when the Big Data pool was created.

customLibraries LibraryInfoArgs[]

List of custom libraries/packages associated with the spark pool.

defaultSparkLogFolder string

The default folder where Spark logs will be written.

dynamicExecutorAllocation DynamicExecutorAllocationArgs

Dynamic Executor Allocation

force boolean

Whether to stop any running jobs in the Big Data pool

isComputeIsolationEnabled boolean

Whether compute isolation is required or not.

libraryRequirements LibraryRequirementsArgs

Library version requirements

location string

The geo-location where the resource lives

nodeCount number

The number of nodes in the Big Data pool.

nodeSize string | NodeSize

The level of compute power that each node in the Big Data pool has.

nodeSizeFamily string | NodeSizeFamily

The kind of nodes that the Big Data pool provides.

provisioningState string

The state of the Big Data pool.

sessionLevelPackagesEnabled boolean

Whether session-level packages are enabled.

sparkConfigProperties LibraryRequirementsArgs

Spark configuration file to specify additional properties

sparkEventsFolder string

The Spark events folder

sparkVersion string

The Apache Spark version.

tags {[key: string]: string}

Resource tags.

resource_group_name str

The name of the resource group. The name is case insensitive.

workspace_name str

The name of the workspace

auto_pause AutoPausePropertiesArgs

Auto-pausing properties

auto_scale AutoScalePropertiesArgs

Auto-scaling properties

big_data_pool_name str

Big Data pool name

cache_size int

The cache size

creation_date str

The time when the Big Data pool was created.

custom_libraries Sequence[LibraryInfoArgs]

List of custom libraries/packages associated with the spark pool.

default_spark_log_folder str

The default folder where Spark logs will be written.

dynamic_executor_allocation DynamicExecutorAllocationArgs

Dynamic Executor Allocation

force bool

Whether to stop any running jobs in the Big Data pool

is_compute_isolation_enabled bool

Whether compute isolation is required or not.

library_requirements LibraryRequirementsArgs

Library version requirements

location str

The geo-location where the resource lives

node_count int

The number of nodes in the Big Data pool.

node_size str | NodeSize

The level of compute power that each node in the Big Data pool has.

node_size_family str | NodeSizeFamily

The kind of nodes that the Big Data pool provides.

provisioning_state str

The state of the Big Data pool.

session_level_packages_enabled bool

Whether session-level packages are enabled.

spark_config_properties LibraryRequirementsArgs

Spark configuration file to specify additional properties

spark_events_folder str

The Spark events folder

spark_version str

The Apache Spark version.

tags Mapping[str, str]

Resource tags.

resourceGroupName String

The name of the resource group. The name is case insensitive.

workspaceName String

The name of the workspace

autoPause Property Map

Auto-pausing properties

autoScale Property Map

Auto-scaling properties

bigDataPoolName String

Big Data pool name

cacheSize Number

The cache size

creationDate String

The time when the Big Data pool was created.

customLibraries List<Property Map>

List of custom libraries/packages associated with the spark pool.

defaultSparkLogFolder String

The default folder where Spark logs will be written.

dynamicExecutorAllocation Property Map

Dynamic Executor Allocation

force Boolean

Whether to stop any running jobs in the Big Data pool

isComputeIsolationEnabled Boolean

Whether compute isolation is required or not.

libraryRequirements Property Map

Library version requirements

location String

The geo-location where the resource lives

nodeCount Number

The number of nodes in the Big Data pool.

nodeSize String | "None" | "Small" | "Medium" | "Large" | "XLarge" | "XXLarge" | "XXXLarge"

The level of compute power that each node in the Big Data pool has.

nodeSizeFamily String | "None" | "MemoryOptimized" | "HardwareAcceleratedFPGA" | "HardwareAcceleratedGPU"

The kind of nodes that the Big Data pool provides.

provisioningState String

The state of the Big Data pool.

sessionLevelPackagesEnabled Boolean

Whether session-level packages are enabled.

sparkConfigProperties Property Map

Spark configuration file to specify additional properties

sparkEventsFolder String

The Spark events folder

sparkVersion String

The Apache Spark version.

tags Map<String>

Resource tags.

Outputs

All input properties are implicitly available as output properties. Additionally, the BigDataPool resource produces the following output properties:

Id string

The provider-assigned unique ID for this managed resource.

LastSucceededTimestamp string

The time when the Big Data pool was updated successfully.

Name string

The name of the resource

Type string

The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"

Id string

The provider-assigned unique ID for this managed resource.

LastSucceededTimestamp string

The time when the Big Data pool was updated successfully.

Name string

The name of the resource

Type string

The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"

id String

The provider-assigned unique ID for this managed resource.

lastSucceededTimestamp String

The time when the Big Data pool was updated successfully.

name String

The name of the resource

type String

The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"

id string

The provider-assigned unique ID for this managed resource.

lastSucceededTimestamp string

The time when the Big Data pool was updated successfully.

name string

The name of the resource

type string

The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"

id str

The provider-assigned unique ID for this managed resource.

last_succeeded_timestamp str

The time when the Big Data pool was updated successfully.

name str

The name of the resource

type str

The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"

id String

The provider-assigned unique ID for this managed resource.

lastSucceededTimestamp String

The time when the Big Data pool was updated successfully.

name String

The name of the resource

type String

The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
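
For example, these outputs can be exported as stack outputs; a short TypeScript sketch extending the TypeScript example above:

// Provider-assigned outputs, available once the pool has been created.
export const poolId = bigDataPool.id;
export const poolName = bigDataPool.name;
export const lastSucceeded = bigDataPool.lastSucceededTimestamp;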

Supporting Types

AutoPauseProperties

DelayInMinutes int

Number of minutes of idle time before the Big Data pool is automatically paused.

Enabled bool

Whether auto-pausing is enabled for the Big Data pool.

DelayInMinutes int

Number of minutes of idle time before the Big Data pool is automatically paused.

Enabled bool

Whether auto-pausing is enabled for the Big Data pool.

delayInMinutes Integer

Number of minutes of idle time before the Big Data pool is automatically paused.

enabled Boolean

Whether auto-pausing is enabled for the Big Data pool.

delayInMinutes number

Number of minutes of idle time before the Big Data pool is automatically paused.

enabled boolean

Whether auto-pausing is enabled for the Big Data pool.

delay_in_minutes int

Number of minutes of idle time before the Big Data pool is automatically paused.

enabled bool

Whether auto-pausing is enabled for the Big Data pool.

delayInMinutes Number

Number of minutes of idle time before the Big Data pool is automatically paused.

enabled Boolean

Whether auto-pausing is enabled for the Big Data pool.

AutoPausePropertiesResponse

DelayInMinutes int

Number of minutes of idle time before the Big Data pool is automatically paused.

Enabled bool

Whether auto-pausing is enabled for the Big Data pool.

DelayInMinutes int

Number of minutes of idle time before the Big Data pool is automatically paused.

Enabled bool

Whether auto-pausing is enabled for the Big Data pool.

delayInMinutes Integer

Number of minutes of idle time before the Big Data pool is automatically paused.

enabled Boolean

Whether auto-pausing is enabled for the Big Data pool.

delayInMinutes number

Number of minutes of idle time before the Big Data pool is automatically paused.

enabled boolean

Whether auto-pausing is enabled for the Big Data pool.

delay_in_minutes int

Number of minutes of idle time before the Big Data pool is automatically paused.

enabled bool

Whether auto-pausing is enabled for the Big Data pool.

delayInMinutes Number

Number of minutes of idle time before the Big Data pool is automatically paused.

enabled Boolean

Whether auto-pausing is enabled for the Big Data pool.

AutoScaleProperties

Enabled bool

Whether automatic scaling is enabled for the Big Data pool.

MaxNodeCount int

The maximum number of nodes the Big Data pool can support.

MinNodeCount int

The minimum number of nodes the Big Data pool can support.

Enabled bool

Whether automatic scaling is enabled for the Big Data pool.

MaxNodeCount int

The maximum number of nodes the Big Data pool can support.

MinNodeCount int

The minimum number of nodes the Big Data pool can support.

enabled Boolean

Whether automatic scaling is enabled for the Big Data pool.

maxNodeCount Integer

The maximum number of nodes the Big Data pool can support.

minNodeCount Integer

The minimum number of nodes the Big Data pool can support.

enabled boolean

Whether automatic scaling is enabled for the Big Data pool.

maxNodeCount number

The maximum number of nodes the Big Data pool can support.

minNodeCount number

The minimum number of nodes the Big Data pool can support.

enabled bool

Whether automatic scaling is enabled for the Big Data pool.

max_node_count int

The maximum number of nodes the Big Data pool can support.

min_node_count int

The minimum number of nodes the Big Data pool can support.

enabled Boolean

Whether automatic scaling is enabled for the Big Data pool.

maxNodeCount Number

The maximum number of nodes the Big Data pool can support.

minNodeCount Number

The minimum number of nodes the Big Data pool can support.

AutoScalePropertiesResponse

Enabled bool

Whether automatic scaling is enabled for the Big Data pool.

MaxNodeCount int

The maximum number of nodes the Big Data pool can support.

MinNodeCount int

The minimum number of nodes the Big Data pool can support.

Enabled bool

Whether automatic scaling is enabled for the Big Data pool.

MaxNodeCount int

The maximum number of nodes the Big Data pool can support.

MinNodeCount int

The minimum number of nodes the Big Data pool can support.

enabled Boolean

Whether automatic scaling is enabled for the Big Data pool.

maxNodeCount Integer

The maximum number of nodes the Big Data pool can support.

minNodeCount Integer

The minimum number of nodes the Big Data pool can support.

enabled boolean

Whether automatic scaling is enabled for the Big Data pool.

maxNodeCount number

The maximum number of nodes the Big Data pool can support.

minNodeCount number

The minimum number of nodes the Big Data pool can support.

enabled bool

Whether automatic scaling is enabled for the Big Data pool.

max_node_count int

The maximum number of nodes the Big Data pool can support.

min_node_count int

The minimum number of nodes the Big Data pool can support.

enabled Boolean

Whether automatic scaling is enabled for the Big Data pool.

maxNodeCount Number

The maximum number of nodes the Big Data pool can support.

minNodeCount Number

The minimum number of nodes the Big Data pool can support.

DynamicExecutorAllocation

Enabled bool

Indicates whether Dynamic Executor Allocation is enabled or not.

Enabled bool

Indicates whether Dynamic Executor Allocation is enabled or not.

enabled Boolean

Indicates whether Dynamic Executor Allocation is enabled or not.

enabled boolean

Indicates whether Dynamic Executor Allocation is enabled or not.

enabled bool

Indicates whether Dynamic Executor Allocation is enabled or not.

enabled Boolean

Indicates whether Dynamic Executor Allocation is enabled or not.
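
The example at the top of this page does not set this property; a short TypeScript sketch of enabling it (resource names are placeholders):

import * as azure_native from "@pulumi/azure-native";

const poolWithDynamicAllocation = new azure_native.synapse.BigDataPool("poolWithDynamicAllocation", {
    resourceGroupName: "ExampleResourceGroup",
    workspaceName: "ExampleWorkspace",
    // Let Spark scale executors up and down within the pool's node limits.
    dynamicExecutorAllocation: {
        enabled: true,
    },
});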

DynamicExecutorAllocationResponse

Enabled bool

Indicates whether Dynamic Executor Allocation is enabled or not.

Enabled bool

Indicates whether Dynamic Executor Allocation is enabled or not.

enabled Boolean

Indicates whether Dynamic Executor Allocation is enabled or not.

enabled boolean

Indicates whether Dynamic Executor Allocation is enabled or not.

enabled bool

Indicates whether Dynamic Executor Allocation is enabled or not.

enabled Boolean

Indicates whether Dynamic Executor Allocation is enabled or not.

LibraryInfo

ContainerName string

Storage blob container name.

Name string

Name of the library.

Path string

Storage blob path of library.

Type string

Type of the library.

ContainerName string

Storage blob container name.

Name string

Name of the library.

Path string

Storage blob path of library.

Type string

Type of the library.

containerName String

Storage blob container name.

name String

Name of the library.

path String

Storage blob path of library.

type String

Type of the library.

containerName string

Storage blob container name.

name string

Name of the library.

path string

Storage blob path of library.

type string

Type of the library.

container_name str

Storage blob container name.

name str

Name of the library.

path str

Storage blob path of library.

type str

Type of the library.

containerName String

Storage blob container name.

name String

Name of the library.

path String

Storage blob path of library.

type String

Type of the library.

LibraryInfoResponse

CreatorId string

Creator Id of the library/package.

ProvisioningStatus string

Provisioning status of the library/package.

UploadedTimestamp string

The last update time of the library.

ContainerName string

Storage blob container name.

Name string

Name of the library.

Path string

Storage blob path of library.

Type string

Type of the library.

CreatorId string

Creator Id of the library/package.

ProvisioningStatus string

Provisioning status of the library/package.

UploadedTimestamp string

The last update time of the library.

ContainerName string

Storage blob container name.

Name string

Name of the library.

Path string

Storage blob path of library.

Type string

Type of the library.

creatorId String

Creator Id of the library/package.

provisioningStatus String

Provisioning status of the library/package.

uploadedTimestamp String

The last update time of the library.

containerName String

Storage blob container name.

name String

Name of the library.

path String

Storage blob path of library.

type String

Type of the library.

creatorId string

Creator Id of the library/package.

provisioningStatus string

Provisioning status of the library/package.

uploadedTimestamp string

The last update time of the library.

containerName string

Storage blob container name.

name string

Name of the library.

path string

Storage blob path of library.

type string

Type of the library.

creator_id str

Creator Id of the library/package.

provisioning_status str

Provisioning status of the library/package.

uploaded_timestamp str

The last update time of the library.

container_name str

Storage blob container name.

name str

Name of the library.

path str

Storage blob path of library.

type str

Type of the library.

creatorId String

Creator Id of the library/package.

provisioningStatus String

Provisioning status of the library/package.

uploadedTimestamp String

The last update time of the library.

containerName String

Storage blob container name.

name String

Name of the library.

path String

Storage blob path of library.

type String

Type of the library.

LibraryRequirements

Content string

The library requirements.

Filename string

The filename of the library requirements file.

Content string

The library requirements.

Filename string

The filename of the library requirements file.

content String

The library requirements.

filename String

The filename of the library requirements file.

content string

The library requirements.

filename string

The filename of the library requirements file.

content str

The library requirements.

filename str

The filename of the library requirements file.

content String

The library requirements.

filename String

The filename of the library requirements file.
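
The example at the top passes an empty content string; in practice content carries the text of the requirements file inline. A TypeScript sketch (the package pins are illustrative, not prescribed by the API):

import * as azure_native from "@pulumi/azure-native";

const poolWithLibraries = new azure_native.synapse.BigDataPool("poolWithLibraries", {
    resourceGroupName: "ExampleResourceGroup",
    workspaceName: "ExampleWorkspace",
    libraryRequirements: {
        filename: "requirements.txt",
        // Inline contents of requirements.txt; pinned versions are illustrative.
        content: "numpy==1.24.4\npandas==2.0.3",
    },
});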

LibraryRequirementsResponse

Time string

The last update time of the library requirements file.

Content string

The library requirements.

Filename string

The filename of the library requirements file.

Time string

The last update time of the library requirements file.

Content string

The library requirements.

Filename string

The filename of the library requirements file.

time String

The last update time of the library requirements file.

content String

The library requirements.

filename String

The filename of the library requirements file.

time string

The last update time of the library requirements file.

content string

The library requirements.

filename string

The filename of the library requirements file.

time str

The last update time of the library requirements file.

content str

The library requirements.

filename str

The filename of the library requirements file.

time String

The last update time of the library requirements file.

content String

The library requirements.

filename String

The filename of the library requirements file.

NodeSize

None
None
Small
Small
Medium
Medium
Large
Large
XLarge
XLarge
XXLarge
XXLarge
XXXLarge
XXXLarge
NodeSizeNone
None
NodeSizeSmall
Small
NodeSizeMedium
Medium
NodeSizeLarge
Large
NodeSizeXLarge
XLarge
NodeSizeXXLarge
XXLarge
NodeSizeXXXLarge
XXXLarge
None
None
Small
Small
Medium
Medium
Large
Large
XLarge
XLarge
XXLarge
XXLarge
XXXLarge
XXXLarge
None
None
Small
Small
Medium
Medium
Large
Large
XLarge
XLarge
XXLarge
XXLarge
XXXLarge
XXXLarge
NONE
None
SMALL
Small
MEDIUM
Medium
LARGE
Large
X_LARGE
XLarge
XX_LARGE
XXLarge
XXX_LARGE
XXXLarge
"None"
None
"Small"
Small
"Medium"
Medium
"Large"
Large
"XLarge"
XLarge
"XXLarge"
XXLarge
"XXXLarge"
XXXLarge

NodeSizeFamily

None
None
MemoryOptimized
MemoryOptimized
HardwareAcceleratedFPGA
HardwareAcceleratedFPGA
HardwareAcceleratedGPU
HardwareAcceleratedGPU
NodeSizeFamilyNone
None
NodeSizeFamilyMemoryOptimized
MemoryOptimized
NodeSizeFamilyHardwareAcceleratedFPGA
HardwareAcceleratedFPGA
NodeSizeFamilyHardwareAcceleratedGPU
HardwareAcceleratedGPU
None
None
MemoryOptimized
MemoryOptimized
HardwareAcceleratedFPGA
HardwareAcceleratedFPGA
HardwareAcceleratedGPU
HardwareAcceleratedGPU
None
None
MemoryOptimized
MemoryOptimized
HardwareAcceleratedFPGA
HardwareAcceleratedFPGA
HardwareAcceleratedGPU
HardwareAcceleratedGPU
NONE
None
MEMORY_OPTIMIZED
MemoryOptimized
HARDWARE_ACCELERATED_FPGA
HardwareAcceleratedFPGA
HARDWARE_ACCELERATED_GPU
HardwareAcceleratedGPU
"None"
None
"MemoryOptimized"
MemoryOptimized
"HardwareAcceleratedFPGA"
HardwareAcceleratedFPGA
"HardwareAcceleratedGPU"
HardwareAcceleratedGPU
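
Because nodeSize and nodeSizeFamily accept either a plain string or the SDK enum, the enum values above can be used for extra type safety. A TypeScript sketch (the exact enum import path may vary by SDK version; here it is assumed to be exported from the synapse module):

import * as azure_native from "@pulumi/azure-native";

const enumPool = new azure_native.synapse.BigDataPool("enumPool", {
    resourceGroupName: "ExampleResourceGroup",
    workspaceName: "ExampleWorkspace",
    // Equivalent to nodeSize: "Medium" and nodeSizeFamily: "MemoryOptimized".
    nodeSize: azure_native.synapse.NodeSize.Medium,
    nodeSizeFamily: azure_native.synapse.NodeSizeFamily.MemoryOptimized,
});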

Import

An existing resource can be imported using its type token, name, and identifier, e.g.

$ pulumi import azure-native:synapse:BigDataPool ExamplePool /subscriptions/01234567-89ab-4def-0123-456789abcdef/resourceGroups/ExampleResourceGroup/providers/Microsoft.Synapse/workspaces/ExampleWorkspace/bigDataPools/ExamplePool 
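
After a successful import, Pulumi marks the resource as protected by default, and the program must contain a matching resource definition (the CLI prints one that can be copied in). A minimal TypeScript sketch of what that definition might look like for the command above:

import * as azure_native from "@pulumi/azure-native";

const examplePool = new azure_native.synapse.BigDataPool("ExamplePool", {
    bigDataPoolName: "ExamplePool",
    resourceGroupName: "ExampleResourceGroup",
    workspaceName: "ExampleWorkspace",
}, {
    protect: true, // imported resources are protected by default
});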

Package Details

Repository
Azure Native (pulumi/pulumi-azure-native)
License
Apache-2.0