1. Packages
  2. Azure Classic
  3. API Docs
  4. synapse
  5. SparkPool

We recommend using Azure Native.

Viewing docs for Azure v4.42.0 (Older version)
published on Monday, Mar 9, 2026 by Pulumi
azure logo

We recommend using Azure Native.

Viewing docs for Azure v4.42.0 (Older version)
published on Monday, Mar 9, 2026 by Pulumi

    Manages a Synapse Spark Pool.

    Create SparkPool Resource

    Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.

    Constructor syntax

    new SparkPool(name: string, args: SparkPoolArgs, opts?: CustomResourceOptions);
    @overload
    def SparkPool(resource_name: str,
                  args: SparkPoolArgs,
                  opts: Optional[ResourceOptions] = None)
    
    @overload
    def SparkPool(resource_name: str,
                  opts: Optional[ResourceOptions] = None,
                  node_size: Optional[str] = None,
                  synapse_workspace_id: Optional[str] = None,
                  node_size_family: Optional[str] = None,
                  dynamic_executor_allocation_enabled: Optional[bool] = None,
                  session_level_packages_enabled: Optional[bool] = None,
                  library_requirement: Optional[SparkPoolLibraryRequirementArgs] = None,
                  name: Optional[str] = None,
                  node_count: Optional[int] = None,
                  compute_isolation_enabled: Optional[bool] = None,
                  cache_size: Optional[int] = None,
                  auto_pause: Optional[SparkPoolAutoPauseArgs] = None,
                  spark_config: Optional[SparkPoolSparkConfigArgs] = None,
                  spark_events_folder: Optional[str] = None,
                  spark_log_folder: Optional[str] = None,
                  spark_version: Optional[str] = None,
                  auto_scale: Optional[SparkPoolAutoScaleArgs] = None,
                  tags: Optional[Mapping[str, str]] = None)
    func NewSparkPool(ctx *Context, name string, args SparkPoolArgs, opts ...ResourceOption) (*SparkPool, error)
    public SparkPool(string name, SparkPoolArgs args, CustomResourceOptions? opts = null)
    public SparkPool(String name, SparkPoolArgs args)
    public SparkPool(String name, SparkPoolArgs args, CustomResourceOptions options)
    
    type: azure:synapse:SparkPool
    properties: # The arguments to resource properties.
    options: # Bag of options to control resource's behavior.
    
    

    Parameters

    name string
    The unique name of the resource.
    args SparkPoolArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    resource_name str
    The unique name of the resource.
    args SparkPoolArgs
    The arguments to resource properties.
    opts ResourceOptions
    Bag of options to control resource's behavior.
    ctx Context
    Context object for the current deployment.
    name string
    The unique name of the resource.
    args SparkPoolArgs
    The arguments to resource properties.
    opts ResourceOption
    Bag of options to control resource's behavior.
    name string
    The unique name of the resource.
    args SparkPoolArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    name String
    The unique name of the resource.
    args SparkPoolArgs
    The arguments to resource properties.
    options CustomResourceOptions
    Bag of options to control resource's behavior.

    Constructor example

    The following reference example uses placeholder values for all input properties.

    var sparkPoolResource = new Azure.Synapse.SparkPool("sparkPoolResource", new()
    {
        NodeSize = "string",
        SynapseWorkspaceId = "string",
        NodeSizeFamily = "string",
        DynamicExecutorAllocationEnabled = false,
        SessionLevelPackagesEnabled = false,
        LibraryRequirement = new Azure.Synapse.Inputs.SparkPoolLibraryRequirementArgs
        {
            Content = "string",
            Filename = "string",
        },
        Name = "string",
        NodeCount = 0,
        ComputeIsolationEnabled = false,
        CacheSize = 0,
        AutoPause = new Azure.Synapse.Inputs.SparkPoolAutoPauseArgs
        {
            DelayInMinutes = 0,
        },
        SparkConfig = new Azure.Synapse.Inputs.SparkPoolSparkConfigArgs
        {
            Content = "string",
            Filename = "string",
        },
        SparkEventsFolder = "string",
        SparkLogFolder = "string",
        SparkVersion = "string",
        AutoScale = new Azure.Synapse.Inputs.SparkPoolAutoScaleArgs
        {
            MaxNodeCount = 0,
            MinNodeCount = 0,
        },
        Tags = 
        {
            { "string", "string" },
        },
    });
    
    example, err := synapse.NewSparkPool(ctx, "sparkPoolResource", &synapse.SparkPoolArgs{
    	NodeSize:                         pulumi.String("string"),
    	SynapseWorkspaceId:               pulumi.String("string"),
    	NodeSizeFamily:                   pulumi.String("string"),
    	DynamicExecutorAllocationEnabled: pulumi.Bool(false),
    	SessionLevelPackagesEnabled:      pulumi.Bool(false),
    	LibraryRequirement: &synapse.SparkPoolLibraryRequirementArgs{
    		Content:  pulumi.String("string"),
    		Filename: pulumi.String("string"),
    	},
    	Name:                    pulumi.String("string"),
    	NodeCount:               pulumi.Int(0),
    	ComputeIsolationEnabled: pulumi.Bool(false),
    	CacheSize:               pulumi.Int(0),
    	AutoPause: &synapse.SparkPoolAutoPauseArgs{
    		DelayInMinutes: pulumi.Int(0),
    	},
    	SparkConfig: &synapse.SparkPoolSparkConfigArgs{
    		Content:  pulumi.String("string"),
    		Filename: pulumi.String("string"),
    	},
    	SparkEventsFolder: pulumi.String("string"),
    	SparkLogFolder:    pulumi.String("string"),
    	SparkVersion:      pulumi.String("string"),
    	AutoScale: &synapse.SparkPoolAutoScaleArgs{
    		MaxNodeCount: pulumi.Int(0),
    		MinNodeCount: pulumi.Int(0),
    	},
    	Tags: pulumi.StringMap{
    		"string": pulumi.String("string"),
    	},
    })
    
    var sparkPoolResource = new SparkPool("sparkPoolResource", SparkPoolArgs.builder()
        .nodeSize("string")
        .synapseWorkspaceId("string")
        .nodeSizeFamily("string")
        .dynamicExecutorAllocationEnabled(false)
        .sessionLevelPackagesEnabled(false)
        .libraryRequirement(SparkPoolLibraryRequirementArgs.builder()
            .content("string")
            .filename("string")
            .build())
        .name("string")
        .nodeCount(0)
        .computeIsolationEnabled(false)
        .cacheSize(0)
        .autoPause(SparkPoolAutoPauseArgs.builder()
            .delayInMinutes(0)
            .build())
        .sparkConfig(SparkPoolSparkConfigArgs.builder()
            .content("string")
            .filename("string")
            .build())
        .sparkEventsFolder("string")
        .sparkLogFolder("string")
        .sparkVersion("string")
        .autoScale(SparkPoolAutoScaleArgs.builder()
            .maxNodeCount(0)
            .minNodeCount(0)
            .build())
        .tags(Map.of("string", "string"))
        .build());
    
    spark_pool_resource = azure.synapse.SparkPool("sparkPoolResource",
        node_size="string",
        synapse_workspace_id="string",
        node_size_family="string",
        dynamic_executor_allocation_enabled=False,
        session_level_packages_enabled=False,
        library_requirement={
            "content": "string",
            "filename": "string",
        },
        name="string",
        node_count=0,
        compute_isolation_enabled=False,
        cache_size=0,
        auto_pause={
            "delay_in_minutes": 0,
        },
        spark_config={
            "content": "string",
            "filename": "string",
        },
        spark_events_folder="string",
        spark_log_folder="string",
        spark_version="string",
        auto_scale={
            "max_node_count": 0,
            "min_node_count": 0,
        },
        tags={
            "string": "string",
        })
    
    const sparkPoolResource = new azure.synapse.SparkPool("sparkPoolResource", {
        nodeSize: "string",
        synapseWorkspaceId: "string",
        nodeSizeFamily: "string",
        dynamicExecutorAllocationEnabled: false,
        sessionLevelPackagesEnabled: false,
        libraryRequirement: {
            content: "string",
            filename: "string",
        },
        name: "string",
        nodeCount: 0,
        computeIsolationEnabled: false,
        cacheSize: 0,
        autoPause: {
            delayInMinutes: 0,
        },
        sparkConfig: {
            content: "string",
            filename: "string",
        },
        sparkEventsFolder: "string",
        sparkLogFolder: "string",
        sparkVersion: "string",
        autoScale: {
            maxNodeCount: 0,
            minNodeCount: 0,
        },
        tags: {
            string: "string",
        },
    });
    
    type: azure:synapse:SparkPool
    properties:
        autoPause:
            delayInMinutes: 0
        autoScale:
            maxNodeCount: 0
            minNodeCount: 0
        cacheSize: 0
        computeIsolationEnabled: false
        dynamicExecutorAllocationEnabled: false
        libraryRequirement:
            content: string
            filename: string
        name: string
        nodeCount: 0
        nodeSize: string
        nodeSizeFamily: string
        sessionLevelPackagesEnabled: false
        sparkConfig:
            content: string
            filename: string
        sparkEventsFolder: string
        sparkLogFolder: string
        sparkVersion: string
        synapseWorkspaceId: string
        tags:
            string: string
    

    SparkPool Resource Properties

    To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

    Inputs

    In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.

    The SparkPool resource accepts the following input properties:

    NodeSize string
    The level of node in the Spark Pool. Possible values are Small, Medium and Large.
    NodeSizeFamily string
    The kind of nodes that the Spark Pool provides. Possible value is MemoryOptimized.
    SynapseWorkspaceId string
    The ID of the Synapse Workspace where the Synapse Spark Pool should exist. Changing this forces a new Synapse Spark Pool to be created.
    AutoPause SparkPoolAutoPause
    An auto_pause block as defined below.
    AutoScale SparkPoolAutoScale
    An auto_scale block as defined below. Exactly one of node_count or auto_scale must be specified.
    CacheSize int
    The cache size in the Spark Pool.
    ComputeIsolationEnabled bool
    Indicates whether compute isolation is enabled or not. Defaults to false.
    DynamicExecutorAllocationEnabled bool
    Indicates whether Dynamic Executor Allocation is enabled or not. Defaults to false.
    LibraryRequirement SparkPoolLibraryRequirement
    A library_requirement block as defined below.
    Name string
    The name which should be used for this Synapse Spark Pool. Changing this forces a new Synapse Spark Pool to be created.
    NodeCount int
    The number of nodes in the Spark Pool. Exactly one of node_count or auto_scale must be specified.
    SessionLevelPackagesEnabled bool
    Indicates whether session level packages are enabled or not. Defaults to false.
    SparkConfig SparkPoolSparkConfig
    A spark_config block as defined below.
    SparkEventsFolder string
    The Spark events folder. Defaults to /events.
    SparkLogFolder string
    The default folder where Spark logs will be written. Defaults to /logs.
    SparkVersion string
    The Apache Spark version. Possible values are 2.4 and 3.1. Defaults to 2.4.
    Tags Dictionary<string, string>
    A mapping of tags which should be assigned to the Synapse Spark Pool.
    NodeSize string
    The level of node in the Spark Pool. Possible values are Small, Medium and Large.
    NodeSizeFamily string
    The kind of nodes that the Spark Pool provides. Possible value is MemoryOptimized.
    SynapseWorkspaceId string
    The ID of the Synapse Workspace where the Synapse Spark Pool should exist. Changing this forces a new Synapse Spark Pool to be created.
    AutoPause SparkPoolAutoPauseArgs
    An auto_pause block as defined below.
    AutoScale SparkPoolAutoScaleArgs
    An auto_scale block as defined below. Exactly one of node_count or auto_scale must be specified.
    CacheSize int
    The cache size in the Spark Pool.
    ComputeIsolationEnabled bool
    Indicates whether compute isolation is enabled or not. Defaults to false.
    DynamicExecutorAllocationEnabled bool
    Indicates whether Dynamic Executor Allocation is enabled or not. Defaults to false.
    LibraryRequirement SparkPoolLibraryRequirementArgs
    A library_requirement block as defined below.
    Name string
    The name which should be used for this Synapse Spark Pool. Changing this forces a new Synapse Spark Pool to be created.
    NodeCount int
    The number of nodes in the Spark Pool. Exactly one of node_count or auto_scale must be specified.
    SessionLevelPackagesEnabled bool
    Indicates whether session level packages are enabled or not. Defaults to false.
    SparkConfig SparkPoolSparkConfigArgs
    A spark_config block as defined below.
    SparkEventsFolder string
    The Spark events folder. Defaults to /events.
    SparkLogFolder string
    The default folder where Spark logs will be written. Defaults to /logs.
    SparkVersion string
    The Apache Spark version. Possible values are 2.4 and 3.1. Defaults to 2.4.
    Tags map[string]string
    A mapping of tags which should be assigned to the Synapse Spark Pool.
    nodeSize String
    The level of node in the Spark Pool. Possible values are Small, Medium and Large.
    nodeSizeFamily String
    The kind of nodes that the Spark Pool provides. Possible value is MemoryOptimized.
    synapseWorkspaceId String
    The ID of the Synapse Workspace where the Synapse Spark Pool should exist. Changing this forces a new Synapse Spark Pool to be created.
    autoPause SparkPoolAutoPause
    An auto_pause block as defined below.
    autoScale SparkPoolAutoScale
    An auto_scale block as defined below. Exactly one of node_count or auto_scale must be specified.
    cacheSize Integer
    The cache size in the Spark Pool.
    computeIsolationEnabled Boolean
    Indicates whether compute isolation is enabled or not. Defaults to false.
    dynamicExecutorAllocationEnabled Boolean
    Indicates whether Dynamic Executor Allocation is enabled or not. Defaults to false.
    libraryRequirement SparkPoolLibraryRequirement
    A library_requirement block as defined below.
    name String
    The name which should be used for this Synapse Spark Pool. Changing this forces a new Synapse Spark Pool to be created.
    nodeCount Integer
    The number of nodes in the Spark Pool. Exactly one of node_count or auto_scale must be specified.
    sessionLevelPackagesEnabled Boolean
    Indicates whether session level packages are enabled or not. Defaults to false.
    sparkConfig SparkPoolSparkConfig
    A spark_config block as defined below.
    sparkEventsFolder String
    The Spark events folder. Defaults to /events.
    sparkLogFolder String
    The default folder where Spark logs will be written. Defaults to /logs.
    sparkVersion String
    The Apache Spark version. Possible values are 2.4 and 3.1. Defaults to 2.4.
    tags Map<String,String>
    A mapping of tags which should be assigned to the Synapse Spark Pool.
    nodeSize string
    The level of node in the Spark Pool. Possible values are Small, Medium and Large.
    nodeSizeFamily string
    The kind of nodes that the Spark Pool provides. Possible value is MemoryOptimized.
    synapseWorkspaceId string
    The ID of the Synapse Workspace where the Synapse Spark Pool should exist. Changing this forces a new Synapse Spark Pool to be created.
    autoPause SparkPoolAutoPause
    An auto_pause block as defined below.
    autoScale SparkPoolAutoScale
    An auto_scale block as defined below. Exactly one of node_count or auto_scale must be specified.
    cacheSize number
    The cache size in the Spark Pool.
    computeIsolationEnabled boolean
    Indicates whether compute isolation is enabled or not. Defaults to false.
    dynamicExecutorAllocationEnabled boolean
    Indicates whether Dynamic Executor Allocation is enabled or not. Defaults to false.
    libraryRequirement SparkPoolLibraryRequirement
    A library_requirement block as defined below.
    name string
    The name which should be used for this Synapse Spark Pool. Changing this forces a new Synapse Spark Pool to be created.
    nodeCount number
    The number of nodes in the Spark Pool. Exactly one of node_count or auto_scale must be specified.
    sessionLevelPackagesEnabled boolean
    Indicates whether session level packages are enabled or not. Defaults to false.
    sparkConfig SparkPoolSparkConfig
    A spark_config block as defined below.
    sparkEventsFolder string
    The Spark events folder. Defaults to /events.
    sparkLogFolder string
    The default folder where Spark logs will be written. Defaults to /logs.
    sparkVersion string
    The Apache Spark version. Possible values are 2.4 and 3.1. Defaults to 2.4.
    tags {[key: string]: string}
    A mapping of tags which should be assigned to the Synapse Spark Pool.
    node_size str
    The level of node in the Spark Pool. Possible values are Small, Medium and Large.
    node_size_family str
    The kind of nodes that the Spark Pool provides. Possible value is MemoryOptimized.
    synapse_workspace_id str
    The ID of the Synapse Workspace where the Synapse Spark Pool should exist. Changing this forces a new Synapse Spark Pool to be created.
    auto_pause SparkPoolAutoPauseArgs
    An auto_pause block as defined below.
    auto_scale SparkPoolAutoScaleArgs
    An auto_scale block as defined below. Exactly one of node_count or auto_scale must be specified.
    cache_size int
    The cache size in the Spark Pool.
    compute_isolation_enabled bool
    Indicates whether compute isolation is enabled or not. Defaults to false.
    dynamic_executor_allocation_enabled bool
    Indicates whether Dynamic Executor Allocation is enabled or not. Defaults to false.
    library_requirement SparkPoolLibraryRequirementArgs
    A library_requirement block as defined below.
    name str
    The name which should be used for this Synapse Spark Pool. Changing this forces a new Synapse Spark Pool to be created.
    node_count int
    The number of nodes in the Spark Pool. Exactly one of node_count or auto_scale must be specified.
    session_level_packages_enabled bool
    Indicates whether session level packages are enabled or not. Defaults to false.
    spark_config SparkPoolSparkConfigArgs
    A spark_config block as defined below.
    spark_events_folder str
    The Spark events folder. Defaults to /events.
    spark_log_folder str
    The default folder where Spark logs will be written. Defaults to /logs.
    spark_version str
    The Apache Spark version. Possible values are 2.4 and 3.1. Defaults to 2.4.
    tags Mapping[str, str]
    A mapping of tags which should be assigned to the Synapse Spark Pool.
    nodeSize String
    The level of node in the Spark Pool. Possible values are Small, Medium and Large.
    nodeSizeFamily String
    The kind of nodes that the Spark Pool provides. Possible value is MemoryOptimized.
    synapseWorkspaceId String
    The ID of the Synapse Workspace where the Synapse Spark Pool should exist. Changing this forces a new Synapse Spark Pool to be created.
    autoPause Property Map
    An auto_pause block as defined below.
    autoScale Property Map
    An auto_scale block as defined below. Exactly one of node_count or auto_scale must be specified.
    cacheSize Number
    The cache size in the Spark Pool.
    computeIsolationEnabled Boolean
    Indicates whether compute isolation is enabled or not. Defaults to false.
    dynamicExecutorAllocationEnabled Boolean
    Indicates whether Dynamic Executor Allocation is enabled or not. Defaults to false.
    libraryRequirement Property Map
    A library_requirement block as defined below.
    name String
    The name which should be used for this Synapse Spark Pool. Changing this forces a new Synapse Spark Pool to be created.
    nodeCount Number
    The number of nodes in the Spark Pool. Exactly one of node_count or auto_scale must be specified.
    sessionLevelPackagesEnabled Boolean
    Indicates whether session level packages are enabled or not. Defaults to false.
    sparkConfig Property Map
    A spark_config block as defined below.
    sparkEventsFolder String
    The Spark events folder. Defaults to /events.
    sparkLogFolder String
    The default folder where Spark logs will be written. Defaults to /logs.
    sparkVersion String
    The Apache Spark version. Possible values are 2.4 and 3.1. Defaults to 2.4.
    tags Map<String>
    A mapping of tags which should be assigned to the Synapse Spark Pool.

    Outputs

    All input properties are implicitly available as output properties. Additionally, the SparkPool resource produces the following output properties:

    Id string
    The provider-assigned unique ID for this managed resource.
    Id string
    The provider-assigned unique ID for this managed resource.
    id String
    The provider-assigned unique ID for this managed resource.
    id string
    The provider-assigned unique ID for this managed resource.
    id str
    The provider-assigned unique ID for this managed resource.
    id String
    The provider-assigned unique ID for this managed resource.

    Look up Existing SparkPool Resource

    Get an existing SparkPool resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

    public static get(name: string, id: Input<ID>, state?: SparkPoolState, opts?: CustomResourceOptions): SparkPool
    @staticmethod
    def get(resource_name: str,
            id: str,
            opts: Optional[ResourceOptions] = None,
            auto_pause: Optional[SparkPoolAutoPauseArgs] = None,
            auto_scale: Optional[SparkPoolAutoScaleArgs] = None,
            cache_size: Optional[int] = None,
            compute_isolation_enabled: Optional[bool] = None,
            dynamic_executor_allocation_enabled: Optional[bool] = None,
            library_requirement: Optional[SparkPoolLibraryRequirementArgs] = None,
            name: Optional[str] = None,
            node_count: Optional[int] = None,
            node_size: Optional[str] = None,
            node_size_family: Optional[str] = None,
            session_level_packages_enabled: Optional[bool] = None,
            spark_config: Optional[SparkPoolSparkConfigArgs] = None,
            spark_events_folder: Optional[str] = None,
            spark_log_folder: Optional[str] = None,
            spark_version: Optional[str] = None,
            synapse_workspace_id: Optional[str] = None,
            tags: Optional[Mapping[str, str]] = None) -> SparkPool
    func GetSparkPool(ctx *Context, name string, id IDInput, state *SparkPoolState, opts ...ResourceOption) (*SparkPool, error)
    public static SparkPool Get(string name, Input<string> id, SparkPoolState? state, CustomResourceOptions? opts = null)
    public static SparkPool get(String name, Output<String> id, SparkPoolState state, CustomResourceOptions options)
    resources:
      _:
        type: azure:synapse:SparkPool
        get:
          id: ${id}
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    resource_name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    The following state arguments are supported:
    AutoPause SparkPoolAutoPause
    An auto_pause block as defined below.
    AutoScale SparkPoolAutoScale
    An auto_scale block as defined below. Exactly one of node_count or auto_scale must be specified.
    CacheSize int
    The cache size in the Spark Pool.
    ComputeIsolationEnabled bool
    Indicates whether compute isolation is enabled or not. Defaults to false.
    DynamicExecutorAllocationEnabled bool
    Indicates whether Dynamic Executor Allocation is enabled or not. Defaults to false.
    LibraryRequirement SparkPoolLibraryRequirement
    A library_requirement block as defined below.
    Name string
    The name which should be used for this Synapse Spark Pool. Changing this forces a new Synapse Spark Pool to be created.
    NodeCount int
    The number of nodes in the Spark Pool. Exactly one of node_count or auto_scale must be specified.
    NodeSize string
    The level of node in the Spark Pool. Possible values are Small, Medium and Large.
    NodeSizeFamily string
    The kind of nodes that the Spark Pool provides. Possible value is MemoryOptimized.
    SessionLevelPackagesEnabled bool
    Indicates whether session level packages are enabled or not. Defaults to false.
    SparkConfig SparkPoolSparkConfig
    A spark_config block as defined below.
    SparkEventsFolder string
    The Spark events folder. Defaults to /events.
    SparkLogFolder string
    The default folder where Spark logs will be written. Defaults to /logs.
    SparkVersion string
    The Apache Spark version. Possible values are 2.4 and 3.1. Defaults to 2.4.
    SynapseWorkspaceId string
    The ID of the Synapse Workspace where the Synapse Spark Pool should exist. Changing this forces a new Synapse Spark Pool to be created.
    Tags Dictionary<string, string>
    A mapping of tags which should be assigned to the Synapse Spark Pool.
    AutoPause SparkPoolAutoPauseArgs
    An auto_pause block as defined below.
    AutoScale SparkPoolAutoScaleArgs
    An auto_scale block as defined below. Exactly one of node_count or auto_scale must be specified.
    CacheSize int
    The cache size in the Spark Pool.
    ComputeIsolationEnabled bool
    Indicates whether compute isolation is enabled or not. Defaults to false.
    DynamicExecutorAllocationEnabled bool
    Indicates whether Dynamic Executor Allocation is enabled or not. Defaults to false.
    LibraryRequirement SparkPoolLibraryRequirementArgs
    A library_requirement block as defined below.
    Name string
    The name which should be used for this Synapse Spark Pool. Changing this forces a new Synapse Spark Pool to be created.
    NodeCount int
    The number of nodes in the Spark Pool. Exactly one of node_count or auto_scale must be specified.
    NodeSize string
    The level of node in the Spark Pool. Possible values are Small, Medium and Large.
    NodeSizeFamily string
    The kind of nodes that the Spark Pool provides. Possible value is MemoryOptimized.
    SessionLevelPackagesEnabled bool
    Indicates whether session level packages are enabled or not. Defaults to false.
    SparkConfig SparkPoolSparkConfigArgs
    A spark_config block as defined below.
    SparkEventsFolder string
    The Spark events folder. Defaults to /events.
    SparkLogFolder string
    The default folder where Spark logs will be written. Defaults to /logs.
    SparkVersion string
    The Apache Spark version. Possible values are 2.4 and 3.1. Defaults to 2.4.
    SynapseWorkspaceId string
    The ID of the Synapse Workspace where the Synapse Spark Pool should exist. Changing this forces a new Synapse Spark Pool to be created.
    Tags map[string]string
    A mapping of tags which should be assigned to the Synapse Spark Pool.
    autoPause SparkPoolAutoPause
    An auto_pause block as defined below.
    autoScale SparkPoolAutoScale
    An auto_scale block as defined below. Exactly one of node_count or auto_scale must be specified.
    cacheSize Integer
    The cache size in the Spark Pool.
    computeIsolationEnabled Boolean
    Indicates whether compute isolation is enabled or not. Defaults to false.
    dynamicExecutorAllocationEnabled Boolean
    Indicates whether Dynamic Executor Allocation is enabled or not. Defaults to false.
    libraryRequirement SparkPoolLibraryRequirement
    A library_requirement block as defined below.
    name String
    The name which should be used for this Synapse Spark Pool. Changing this forces a new Synapse Spark Pool to be created.
    nodeCount Integer
    The number of nodes in the Spark Pool. Exactly one of node_count or auto_scale must be specified.
    nodeSize String
    The level of node in the Spark Pool. Possible values are Small, Medium and Large.
    nodeSizeFamily String
    The kind of nodes that the Spark Pool provides. Possible value is MemoryOptimized.
    sessionLevelPackagesEnabled Boolean
    Indicates whether session level packages are enabled or not. Defaults to false.
    sparkConfig SparkPoolSparkConfig
    A spark_config block as defined below.
    sparkEventsFolder String
    The Spark events folder. Defaults to /events.
    sparkLogFolder String
    The default folder where Spark logs will be written. Defaults to /logs.
    sparkVersion String
    The Apache Spark version. Possible values are 2.4 and 3.1. Defaults to 2.4.
    synapseWorkspaceId String
    The ID of the Synapse Workspace where the Synapse Spark Pool should exist. Changing this forces a new Synapse Spark Pool to be created.
    tags Map<String,String>
    A mapping of tags which should be assigned to the Synapse Spark Pool.
    autoPause SparkPoolAutoPause
    An auto_pause block as defined below.
    autoScale SparkPoolAutoScale
    An auto_scale block as defined below. Exactly one of node_count or auto_scale must be specified.
    cacheSize number
    The cache size in the Spark Pool.
    computeIsolationEnabled boolean
    Indicates whether compute isolation is enabled or not. Defaults to false.
    dynamicExecutorAllocationEnabled boolean
    Indicates whether Dynamic Executor Allocation is enabled or not. Defaults to false.
    libraryRequirement SparkPoolLibraryRequirement
    A library_requirement block as defined below.
    name string
    The name which should be used for this Synapse Spark Pool. Changing this forces a new Synapse Spark Pool to be created.
    nodeCount number
    The number of nodes in the Spark Pool. Exactly one of node_count or auto_scale must be specified.
    nodeSize string
    The level of node in the Spark Pool. Possible values are Small, Medium and Large.
    nodeSizeFamily string
    The kind of nodes that the Spark Pool provides. Possible value is MemoryOptimized.
    sessionLevelPackagesEnabled boolean
    Indicates whether session level packages are enabled or not. Defaults to false.
    sparkConfig SparkPoolSparkConfig
    A spark_config block as defined below.
    sparkEventsFolder string
    The Spark events folder. Defaults to /events.
    sparkLogFolder string
    The default folder where Spark logs will be written. Defaults to /logs.
    sparkVersion string
    The Apache Spark version. Possible values are 2.4 and 3.1. Defaults to 2.4.
    synapseWorkspaceId string
    The ID of the Synapse Workspace where the Synapse Spark Pool should exist. Changing this forces a new Synapse Spark Pool to be created.
    tags {[key: string]: string}
    A mapping of tags which should be assigned to the Synapse Spark Pool.
    auto_pause SparkPoolAutoPauseArgs
    An auto_pause block as defined below.
    auto_scale SparkPoolAutoScaleArgs
    An auto_scale block as defined below. Exactly one of node_count or auto_scale must be specified.
    cache_size int
    The cache size in the Spark Pool.
    compute_isolation_enabled bool
    Indicates whether compute isolation is enabled or not. Defaults to false.
    dynamic_executor_allocation_enabled bool
    Indicates whether Dynamic Executor Allocation is enabled or not. Defaults to false.
    library_requirement SparkPoolLibraryRequirementArgs
    A library_requirement block as defined below.
    name str
    The name which should be used for this Synapse Spark Pool. Changing this forces a new Synapse Spark Pool to be created.
    node_count int
    The number of nodes in the Spark Pool. Exactly one of node_count or auto_scale must be specified.
    node_size str
    The level of node in the Spark Pool. Possible values are Small, Medium and Large.
    node_size_family str
    The kind of nodes that the Spark Pool provides. Possible value is MemoryOptimized.
    session_level_packages_enabled bool
    Indicates whether session level packages are enabled or not. Defaults to false.
    spark_config SparkPoolSparkConfigArgs
    A spark_config block as defined below.
    spark_events_folder str
    The Spark events folder. Defaults to /events.
    spark_log_folder str
    The default folder where Spark logs will be written. Defaults to /logs.
    spark_version str
    The Apache Spark version. Possible values are 2.4 and 3.1. Defaults to 2.4.
    synapse_workspace_id str
    The ID of the Synapse Workspace where the Synapse Spark Pool should exist. Changing this forces a new Synapse Spark Pool to be created.
    tags Mapping[str, str]
    A mapping of tags which should be assigned to the Synapse Spark Pool.
    autoPause Property Map
    An auto_pause block as defined below.
    autoScale Property Map
    An auto_scale block as defined below. Exactly one of node_count or auto_scale must be specified.
    cacheSize Number
    The cache size in the Spark Pool.
    computeIsolationEnabled Boolean
    Indicates whether compute isolation is enabled or not. Defaults to false.
    dynamicExecutorAllocationEnabled Boolean
    Indicates whether Dynamic Executor Allocation is enabled or not. Defaults to false.
    libraryRequirement Property Map
    A library_requirement block as defined below.
    name String
    The name which should be used for this Synapse Spark Pool. Changing this forces a new Synapse Spark Pool to be created.
    nodeCount Number
    The number of nodes in the Spark Pool. Exactly one of node_count or auto_scale must be specified.
    nodeSize String
    The level of node in the Spark Pool. Possible values are Small, Medium and Large.
    nodeSizeFamily String
    The kind of nodes that the Spark Pool provides. Possible value is MemoryOptimized.
    sessionLevelPackagesEnabled Boolean
    Indicates whether session level packages are enabled or not. Defaults to false.
    sparkConfig Property Map
    A spark_config block as defined below.
    sparkEventsFolder String
    The Spark events folder. Defaults to /events.
    sparkLogFolder String
    The default folder where Spark logs will be written. Defaults to /logs.
    sparkVersion String
    The Apache Spark version. Possible values are 2.4 and 3.1. Defaults to 2.4.
    synapseWorkspaceId String
    The ID of the Synapse Workspace where the Synapse Spark Pool should exist. Changing this forces a new Synapse Spark Pool to be created.
    tags Map<String>
    A mapping of tags which should be assigned to the Synapse Spark Pool.

    Supporting Types

    SparkPoolAutoPause, SparkPoolAutoPauseArgs

    DelayInMinutes int
    Number of minutes of idle time before the Spark Pool is automatically paused. Must be between 5 and 10080.
    DelayInMinutes int
    Number of minutes of idle time before the Spark Pool is automatically paused. Must be between 5 and 10080.
    delayInMinutes Integer
    Number of minutes of idle time before the Spark Pool is automatically paused. Must be between 5 and 10080.
    delayInMinutes number
    Number of minutes of idle time before the Spark Pool is automatically paused. Must be between 5 and 10080.
    delay_in_minutes int
    Number of minutes of idle time before the Spark Pool is automatically paused. Must be between 5 and 10080.
    delayInMinutes Number
    Number of minutes of idle time before the Spark Pool is automatically paused. Must be between 5 and 10080.

    SparkPoolAutoScale, SparkPoolAutoScaleArgs

    MaxNodeCount int
    The maximum number of nodes the Spark Pool can support. Must be between 3 and 200.
    MinNodeCount int
    The minimum number of nodes the Spark Pool can support. Must be between 3 and 200.
    MaxNodeCount int
    The maximum number of nodes the Spark Pool can support. Must be between 3 and 200.
    MinNodeCount int
    The minimum number of nodes the Spark Pool can support. Must be between 3 and 200.
    maxNodeCount Integer
    The maximum number of nodes the Spark Pool can support. Must be between 3 and 200.
    minNodeCount Integer
    The minimum number of nodes the Spark Pool can support. Must be between 3 and 200.
    maxNodeCount number
    The maximum number of nodes the Spark Pool can support. Must be between 3 and 200.
    minNodeCount number
    The minimum number of nodes the Spark Pool can support. Must be between 3 and 200.
    max_node_count int
    The maximum number of nodes the Spark Pool can support. Must be between 3 and 200.
    min_node_count int
    The minimum number of nodes the Spark Pool can support. Must be between 3 and 200.
    maxNodeCount Number
    The maximum number of nodes the Spark Pool can support. Must be between 3 and 200.
    minNodeCount Number
    The minimum number of nodes the Spark Pool can support. Must be between 3 and 200.

    SparkPoolLibraryRequirement, SparkPoolLibraryRequirementArgs

    Content string
    The content of library requirements.
    Filename string
    The name of the library requirements file.
    Content string
    The content of library requirements.
    Filename string
    The name of the library requirements file.
    content String
    The content of library requirements.
    filename String
    The name of the library requirements file.
    content string
    The content of library requirements.
    filename string
    The name of the library requirements file.
    content str
    The content of library requirements.
    filename str
    The name of the library requirements file.
    content String
    The content of library requirements.
    filename String
    The name of the library requirements file.

    SparkPoolSparkConfig, SparkPoolSparkConfigArgs

    Content string
    The contents of a spark configuration.
    Filename string
    The name of the file where the spark configuration content will be stored.
    Content string
    The contents of a spark configuration.
    Filename string
    The name of the file where the spark configuration content will be stored.
    content String
    The contents of a spark configuration.
    filename String
    The name of the file where the spark configuration content will be stored.
    content string
    The contents of a spark configuration.
    filename string
    The name of the file where the spark configuration content will be stored.
    content str
    The contents of a spark configuration.
    filename str
    The name of the file where the spark configuration content will be stored.
    content String
    The contents of a spark configuration.
    filename String
    The name of the file where the spark configuration content will be stored.

    Import

    Synapse Spark Pool can be imported using the resource id, e.g.

     $ pulumi import azure:synapse/sparkPool:SparkPool example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.Synapse/workspaces/workspace1/bigDataPools/sparkPool1
    

    To learn more about importing existing cloud resources, see Importing resources.

    Package Details

    Repository
    Azure Classic pulumi/pulumi-azure
    License
    Apache-2.0
    Notes
    This Pulumi package is based on the azurerm Terraform Provider.
    azure logo

    We recommend using Azure Native.

    Viewing docs for Azure v4.42.0 (Older version)
    published on Monday, Mar 9, 2026 by Pulumi
      Try Pulumi Cloud free. Your team will thank you.