BigDataPool

A Big Data (Apache Spark) pool in an Azure Synapse workspace. API Version: 2019-06-01-preview.

Example Usage

Create or update a Big Data pool

C#

using Pulumi;
using AzureNextGen = Pulumi.AzureNextGen;

class MyStack : Stack
{
    public MyStack()
    {
        var bigDataPool = new AzureNextGen.Synapse.V20190601Preview.BigDataPool("bigDataPool", new AzureNextGen.Synapse.V20190601Preview.BigDataPoolArgs
        {
            AutoPause = new AzureNextGen.Synapse.V20190601Preview.Inputs.AutoPausePropertiesArgs
            {
                DelayInMinutes = 15,
                Enabled = true,
            },
            AutoScale = new AzureNextGen.Synapse.V20190601Preview.Inputs.AutoScalePropertiesArgs
            {
                Enabled = true,
                MaxNodeCount = 50,
                MinNodeCount = 3,
            },
            BigDataPoolName = "ExamplePool",
            DefaultSparkLogFolder = "/logs",
            LibraryRequirements = new AzureNextGen.Synapse.V20190601Preview.Inputs.LibraryRequirementsArgs
            {
                Content = "",
                Filename = "requirements.txt",
            },
            Location = "West US 2",
            NodeCount = 4,
            NodeSize = "Medium",
            NodeSizeFamily = "MemoryOptimized",
            ResourceGroupName = "ExampleResourceGroup",
            SparkEventsFolder = "/events",
            SparkVersion = "2.4",
            Tags = 
            {
                { "key", "value" },
            },
            WorkspaceName = "ExampleWorkspace",
        });
    }

}

Go

package main

import (
    synapse "github.com/pulumi/pulumi-azure-nextgen/sdk/go/azure/synapse/v20190601preview"
    "github.com/pulumi/pulumi/sdk/v2/go/pulumi"
)

func main() {
    pulumi.Run(func(ctx *pulumi.Context) error {
        _, err := synapse.NewBigDataPool(ctx, "bigDataPool", &synapse.BigDataPoolArgs{
            AutoPause: &synapse.AutoPausePropertiesArgs{
                DelayInMinutes: pulumi.Int(15),
                Enabled:        pulumi.Bool(true),
            },
            AutoScale: &synapse.AutoScalePropertiesArgs{
                Enabled:      pulumi.Bool(true),
                MaxNodeCount: pulumi.Int(50),
                MinNodeCount: pulumi.Int(3),
            },
            BigDataPoolName:       pulumi.String("ExamplePool"),
            DefaultSparkLogFolder: pulumi.String("/logs"),
            LibraryRequirements: &synapse.LibraryRequirementsArgs{
                Content:  pulumi.String(""),
                Filename: pulumi.String("requirements.txt"),
            },
            Location:          pulumi.String("West US 2"),
            NodeCount:         pulumi.Int(4),
            NodeSize:          pulumi.String("Medium"),
            NodeSizeFamily:    pulumi.String("MemoryOptimized"),
            ResourceGroupName: pulumi.String("ExampleResourceGroup"),
            SparkEventsFolder: pulumi.String("/events"),
            SparkVersion:      pulumi.String("2.4"),
            Tags: pulumi.StringMap{
                "key": pulumi.String("value"),
            },
            WorkspaceName: pulumi.String("ExampleWorkspace"),
        })
        if err != nil {
            return err
        }
        return nil
    })
}

Python

import pulumi
import pulumi_azure_nextgen as azure_nextgen

big_data_pool = azure_nextgen.synapse.v20190601preview.BigDataPool("bigDataPool",
    auto_pause=azure_nextgen.synapse.v20190601preview.AutoPausePropertiesArgs(
        delay_in_minutes=15,
        enabled=True,
    ),
    auto_scale=azure_nextgen.synapse.v20190601preview.AutoScalePropertiesArgs(
        enabled=True,
        max_node_count=50,
        min_node_count=3,
    ),
    big_data_pool_name="ExamplePool",
    default_spark_log_folder="/logs",
    library_requirements=azure_nextgen.synapse.v20190601preview.LibraryRequirementsArgs(
        content="",
        filename="requirements.txt",
    ),
    location="West US 2",
    node_count=4,
    node_size="Medium",
    node_size_family="MemoryOptimized",
    resource_group_name="ExampleResourceGroup",
    spark_events_folder="/events",
    spark_version="2.4",
    tags={
        "key": "value",
    },
    workspace_name="ExampleWorkspace")

TypeScript

import * as pulumi from "@pulumi/pulumi";
import * as azure_nextgen from "@pulumi/azure-nextgen";

const bigDataPool = new azure_nextgen.synapse.v20190601preview.BigDataPool("bigDataPool", {
    autoPause: {
        delayInMinutes: 15,
        enabled: true,
    },
    autoScale: {
        enabled: true,
        maxNodeCount: 50,
        minNodeCount: 3,
    },
    bigDataPoolName: "ExamplePool",
    defaultSparkLogFolder: "/logs",
    libraryRequirements: {
        content: "",
        filename: "requirements.txt",
    },
    location: "West US 2",
    nodeCount: 4,
    nodeSize: "Medium",
    nodeSizeFamily: "MemoryOptimized",
    resourceGroupName: "ExampleResourceGroup",
    sparkEventsFolder: "/events",
    sparkVersion: "2.4",
    tags: {
        key: "value",
    },
    workspaceName: "ExampleWorkspace",
});

Create a BigDataPool Resource

TypeScript

new BigDataPool(name: string, args: BigDataPoolArgs, opts?: CustomResourceOptions);

name string
The unique name of the resource.
args BigDataPoolArgs
The arguments to resource properties.
opts CustomResourceOptions
Bag of options to control resource's behavior.

Python

def BigDataPool(resource_name: str,
                opts: Optional[ResourceOptions] = None,
                auto_pause: Optional[AutoPausePropertiesArgs] = None,
                auto_scale: Optional[AutoScalePropertiesArgs] = None,
                big_data_pool_name: Optional[str] = None,
                creation_date: Optional[str] = None,
                default_spark_log_folder: Optional[str] = None,
                force: Optional[bool] = None,
                have_library_requirements_changed: Optional[bool] = None,
                is_compute_isolation_enabled: Optional[bool] = None,
                library_requirements: Optional[LibraryRequirementsArgs] = None,
                location: Optional[str] = None,
                node_count: Optional[int] = None,
                node_size: Optional[Union[str, NodeSize]] = None,
                node_size_family: Optional[Union[str, NodeSizeFamily]] = None,
                provisioning_state: Optional[str] = None,
                resource_group_name: Optional[str] = None,
                session_level_packages_enabled: Optional[bool] = None,
                spark_config_properties: Optional[LibraryRequirementsArgs] = None,
                spark_events_folder: Optional[str] = None,
                spark_version: Optional[str] = None,
                tags: Optional[Mapping[str, str]] = None,
                workspace_name: Optional[str] = None)

resource_name str
The unique name of the resource.
opts ResourceOptions
A bag of options that control this resource's behavior.

Go

func NewBigDataPool(ctx *Context, name string, args BigDataPoolArgs, opts ...ResourceOption) (*BigDataPool, error)

ctx Context
Context object for the current deployment.
name string
The unique name of the resource.
args BigDataPoolArgs
The arguments to resource properties.
opts ResourceOption
Bag of options to control resource's behavior.

C#

public BigDataPool(string name, BigDataPoolArgs args, CustomResourceOptions? opts = null)

name string
The unique name of the resource.
args BigDataPoolArgs
The arguments to resource properties.
opts CustomResourceOptions
Bag of options to control resource's behavior.

BigDataPool Resource Properties

To learn more about resource properties and how to use them, see Inputs and Outputs in the Programming Model docs.
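
For example, inputs accept plain values or values resolved at deployment time, and every property of the created resource can be read back as an output. A minimal TypeScript sketch of this pattern (the config key "location" and the resource names are illustrative):

import * as pulumi from "@pulumi/pulumi";
import * as azure_nextgen from "@pulumi/azure-nextgen";

const config = new pulumi.Config();

// Inputs accept plain values or values resolved at deployment time,
// such as config lookups.
const pool = new azure_nextgen.synapse.v20190601preview.BigDataPool("pool", {
    bigDataPoolName: "ExamplePool",
    location: config.require("location"),
    resourceGroupName: "ExampleResourceGroup",
    workspaceName: "ExampleWorkspace",
    nodeCount: 4,
    nodeSize: "Medium",
    nodeSizeFamily: "MemoryOptimized",
    sparkVersion: "2.4",
});

// Every property is also available as a pulumi.Output on the instance.
export const poolName = pool.name;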

Inputs

The BigDataPool resource accepts the following input properties:

C#

BigDataPoolName string
Big Data pool name
Location string
The geo-location where the resource lives
ResourceGroupName string
The name of the resource group. The name is case insensitive.
WorkspaceName string
The name of the workspace
AutoPause Pulumi.AzureNextGen.Synapse.Inputs.AutoPausePropertiesArgs
Auto-pausing properties
AutoScale Pulumi.AzureNextGen.Synapse.Inputs.AutoScalePropertiesArgs
Auto-scaling properties
CreationDate string
The time when the Big Data pool was created.
DefaultSparkLogFolder string
The default folder where Spark logs will be written.
Force bool
Whether to stop any running jobs in the Big Data pool
HaveLibraryRequirementsChanged bool
Whether the library requirements have changed.
IsComputeIsolationEnabled bool
Whether compute isolation is required or not.
LibraryRequirements Pulumi.AzureNextGen.Synapse.Inputs.LibraryRequirementsArgs
Library version requirements
NodeCount int
The number of nodes in the Big Data pool.
NodeSize string | Pulumi.AzureNextGen.Synapse.NodeSize
The level of compute power that each node in the Big Data pool has.
NodeSizeFamily string | Pulumi.AzureNextGen.Synapse.NodeSizeFamily
The kind of nodes that the Big Data pool provides.
ProvisioningState string
The state of the Big Data pool.
SessionLevelPackagesEnabled bool
Whether session-level packages are enabled.
SparkConfigProperties Pulumi.AzureNextGen.Synapse.Inputs.LibraryRequirementsArgs
Spark configuration file to specify additional properties
SparkEventsFolder string
The Spark events folder
SparkVersion string
The Apache Spark version.
Tags Dictionary<string, string>
Resource tags.

Go

BigDataPoolName string
Big Data pool name
Location string
The geo-location where the resource lives
ResourceGroupName string
The name of the resource group. The name is case insensitive.
WorkspaceName string
The name of the workspace
AutoPause AutoPauseProperties
Auto-pausing properties
AutoScale AutoScaleProperties
Auto-scaling properties
CreationDate string
The time when the Big Data pool was created.
DefaultSparkLogFolder string
The default folder where Spark logs will be written.
Force bool
Whether to stop any running jobs in the Big Data pool
HaveLibraryRequirementsChanged bool
Whether the library requirements have changed.
IsComputeIsolationEnabled bool
Whether compute isolation is required or not.
LibraryRequirements LibraryRequirements
Library version requirements
NodeCount int
The number of nodes in the Big Data pool.
NodeSize string | NodeSize
The level of compute power that each node in the Big Data pool has.
NodeSizeFamily string | NodeSizeFamily
The kind of nodes that the Big Data pool provides.
ProvisioningState string
The state of the Big Data pool.
SessionLevelPackagesEnabled bool
Whether session-level packages are enabled.
SparkConfigProperties LibraryRequirements
Spark configuration file to specify additional properties
SparkEventsFolder string
The Spark events folder
SparkVersion string
The Apache Spark version.
Tags map[string]string
Resource tags.

TypeScript

bigDataPoolName string
Big Data pool name
location string
The geo-location where the resource lives
resourceGroupName string
The name of the resource group. The name is case insensitive.
workspaceName string
The name of the workspace
autoPause AutoPauseProperties
Auto-pausing properties
autoScale AutoScaleProperties
Auto-scaling properties
creationDate string
The time when the Big Data pool was created.
defaultSparkLogFolder string
The default folder where Spark logs will be written.
force boolean
Whether to stop any running jobs in the Big Data pool
haveLibraryRequirementsChanged boolean
Whether the library requirements have changed.
isComputeIsolationEnabled boolean
Whether compute isolation is required or not.
libraryRequirements LibraryRequirements
Library version requirements
nodeCount number
The number of nodes in the Big Data pool.
nodeSize string | NodeSize
The level of compute power that each node in the Big Data pool has.
nodeSizeFamily string | NodeSizeFamily
The kind of nodes that the Big Data pool provides.
provisioningState string
The state of the Big Data pool.
sessionLevelPackagesEnabled boolean
Whether session-level packages are enabled.
sparkConfigProperties LibraryRequirements
Spark configuration file to specify additional properties
sparkEventsFolder string
The Spark events folder
sparkVersion string
The Apache Spark version.
tags {[key: string]: string}
Resource tags.

Python

big_data_pool_name str
Big Data pool name
location str
The geo-location where the resource lives
resource_group_name str
The name of the resource group. The name is case insensitive.
workspace_name str
The name of the workspace
auto_pause AutoPausePropertiesArgs
Auto-pausing properties
auto_scale AutoScalePropertiesArgs
Auto-scaling properties
creation_date str
The time when the Big Data pool was created.
default_spark_log_folder str
The default folder where Spark logs will be written.
force bool
Whether to stop any running jobs in the Big Data pool
have_library_requirements_changed bool
Whether the library requirements have changed.
is_compute_isolation_enabled bool
Whether compute isolation is required or not.
library_requirements LibraryRequirementsArgs
Library version requirements
node_count int
The number of nodes in the Big Data pool.
node_size str | NodeSize
The level of compute power that each node in the Big Data pool has.
node_size_family str | NodeSizeFamily
The kind of nodes that the Big Data pool provides.
provisioning_state str
The state of the Big Data pool.
session_level_packages_enabled bool
Whether session-level packages are enabled.
spark_config_properties LibraryRequirementsArgs
Spark configuration file to specify additional properties
spark_events_folder str
The Spark events folder
spark_version str
The Apache Spark version.
tags Mapping[str, str]
Resource tags.

Outputs

All input properties are implicitly available as output properties. Additionally, the BigDataPool resource produces the following output properties:

C#

Id string
The provider-assigned unique ID for this managed resource.
Name string
The name of the resource
Type string
The type of the resource. E.g. “Microsoft.Compute/virtualMachines” or “Microsoft.Storage/storageAccounts”

Go

Id string
The provider-assigned unique ID for this managed resource.
Name string
The name of the resource
Type string
The type of the resource. E.g. “Microsoft.Compute/virtualMachines” or “Microsoft.Storage/storageAccounts”

TypeScript

id string
The provider-assigned unique ID for this managed resource.
name string
The name of the resource
type string
The type of the resource. E.g. “Microsoft.Compute/virtualMachines” or “Microsoft.Storage/storageAccounts”

Python

id str
The provider-assigned unique ID for this managed resource.
name str
The name of the resource
type str
The type of the resource. E.g. “Microsoft.Compute/virtualMachines” or “Microsoft.Storage/storageAccounts”
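
These output properties are pulumi.Output values: they can be exported from the stack or combined with apply/interpolate once the resource has been created. A minimal TypeScript sketch, assuming bigDataPool is the resource declared in the example usage above:

// Export the provider-assigned ID directly.
export const poolId = bigDataPool.id;

// Combine several outputs into a single value with pulumi.interpolate.
export const poolSummary = pulumi.interpolate`${bigDataPool.name} (${bigDataPool.type})`;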

Supporting Types

AutoPauseProperties

C#

DelayInMinutes int
Number of minutes of idle time before the Big Data pool is automatically paused.
Enabled bool
Whether auto-pausing is enabled for the Big Data pool.

Go

DelayInMinutes int
Number of minutes of idle time before the Big Data pool is automatically paused.
Enabled bool
Whether auto-pausing is enabled for the Big Data pool.

TypeScript

delayInMinutes number
Number of minutes of idle time before the Big Data pool is automatically paused.
enabled boolean
Whether auto-pausing is enabled for the Big Data pool.

Python

delay_in_minutes int
Number of minutes of idle time before the Big Data pool is automatically paused.
enabled bool
Whether auto-pausing is enabled for the Big Data pool.

AutoPausePropertiesResponse

C#

DelayInMinutes int
Number of minutes of idle time before the Big Data pool is automatically paused.
Enabled bool
Whether auto-pausing is enabled for the Big Data pool.

Go

DelayInMinutes int
Number of minutes of idle time before the Big Data pool is automatically paused.
Enabled bool
Whether auto-pausing is enabled for the Big Data pool.

TypeScript

delayInMinutes number
Number of minutes of idle time before the Big Data pool is automatically paused.
enabled boolean
Whether auto-pausing is enabled for the Big Data pool.

Python

delay_in_minutes int
Number of minutes of idle time before the Big Data pool is automatically paused.
enabled bool
Whether auto-pausing is enabled for the Big Data pool.

AutoScaleProperties

C#

Enabled bool
Whether automatic scaling is enabled for the Big Data pool.
MaxNodeCount int
The maximum number of nodes the Big Data pool can support.
MinNodeCount int
The minimum number of nodes the Big Data pool can support.

Go

Enabled bool
Whether automatic scaling is enabled for the Big Data pool.
MaxNodeCount int
The maximum number of nodes the Big Data pool can support.
MinNodeCount int
The minimum number of nodes the Big Data pool can support.

TypeScript

enabled boolean
Whether automatic scaling is enabled for the Big Data pool.
maxNodeCount number
The maximum number of nodes the Big Data pool can support.
minNodeCount number
The minimum number of nodes the Big Data pool can support.

Python

enabled bool
Whether automatic scaling is enabled for the Big Data pool.
max_node_count int
The maximum number of nodes the Big Data pool can support.
min_node_count int
The minimum number of nodes the Big Data pool can support.

AutoScalePropertiesResponse

C#

Enabled bool
Whether automatic scaling is enabled for the Big Data pool.
MaxNodeCount int
The maximum number of nodes the Big Data pool can support.
MinNodeCount int
The minimum number of nodes the Big Data pool can support.

Go

Enabled bool
Whether automatic scaling is enabled for the Big Data pool.
MaxNodeCount int
The maximum number of nodes the Big Data pool can support.
MinNodeCount int
The minimum number of nodes the Big Data pool can support.

TypeScript

enabled boolean
Whether automatic scaling is enabled for the Big Data pool.
maxNodeCount number
The maximum number of nodes the Big Data pool can support.
minNodeCount number
The minimum number of nodes the Big Data pool can support.

Python

enabled bool
Whether automatic scaling is enabled for the Big Data pool.
max_node_count int
The maximum number of nodes the Big Data pool can support.
min_node_count int
The minimum number of nodes the Big Data pool can support.

LibraryRequirements

C#

Content string
The library requirements.
Filename string
The filename of the library requirements file.

Go

Content string
The library requirements.
Filename string
The filename of the library requirements file.

TypeScript

content string
The library requirements.
filename string
The filename of the library requirements file.

Python

content str
The library requirements.
filename str
The filename of the library requirements file.
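
Because content holds the text of the requirements file itself, a common pattern is to read a local pip requirements file at deployment time. A minimal TypeScript sketch (the local file path is illustrative):

import * as fs from "fs";

// Read a local pip requirements file and pass its text as the `content` input.
const libraryRequirements = {
    filename: "requirements.txt",
    content: fs.readFileSync("requirements.txt", "utf8"),
};

Pass this object as the libraryRequirements argument of BigDataPool, as shown in the example usage above.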

LibraryRequirementsResponse

C#

Time string
The last update time of the library requirements file.
Content string
The library requirements.
Filename string
The filename of the library requirements file.

Go

Time string
The last update time of the library requirements file.
Content string
The library requirements.
Filename string
The filename of the library requirements file.

TypeScript

time string
The last update time of the library requirements file.
content string
The library requirements.
filename string
The filename of the library requirements file.

Python

time str
The last update time of the library requirements file.
content str
The library requirements.
filename str
The filename of the library requirements file.

NodeSize

C#

None
None
Small
Small
Medium
Medium
Large
Large
XLarge
XLarge
XXLarge
XXLarge
XXXLarge
XXXLarge

Go

NodeSizeNone
None
NodeSizeSmall
Small
NodeSizeMedium
Medium
NodeSizeLarge
Large
NodeSizeXLarge
XLarge
NodeSizeXXLarge
XXLarge
NodeSizeXXXLarge
XXXLarge

TypeScript

None
None
Small
Small
Medium
Medium
Large
Large
XLarge
XLarge
XXLarge
XXLarge
XXXLarge
XXXLarge

Python

NONE
None
SMALL
Small
MEDIUM
Medium
LARGE
Large
X_LARGE
XLarge
XX_LARGE
XXLarge
XXX_LARGE
XXXLarge

NodeSizeFamily

C#

None
None
MemoryOptimized
MemoryOptimized

Go

NodeSizeFamilyNone
None
NodeSizeFamilyMemoryOptimized
MemoryOptimized

TypeScript

None
None
MemoryOptimized
MemoryOptimized

Python

NONE
None
MEMORY_OPTIMIZED
MemoryOptimized

Import

An existing resource can be imported using its type token, name, and identifier, e.g.

$ pulumi import azure-nextgen:synapse/v20190601preview:BigDataPool ExamplePool /subscriptions/01234567-89ab-4def-0123-456789abcdef/resourceGroups/ExampleResourceGroup/providers/Microsoft.Synapse/workspaces/ExampleWorkspace/bigDataPools/ExamplePool 
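
The identifier is the pool's full Azure resource ID. After the import succeeds, add a matching BigDataPool definition to your program so that subsequent pulumi up runs manage the imported resource.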

Package Details

Repository
https://github.com/pulumi/pulumi-azure-nextgen
License
Apache-2.0