Azure Classic

v5.24.0 published on Wednesday, Nov 9, 2022 by Pulumi

LinkedServiceAzureDatabricks

Manages a Linked Service (connection) between Azure Databricks and Azure Data Factory.

Example Usage

With Managed Identity & New Cluster

using System.Collections.Generic;
using Pulumi;
using Azure = Pulumi.Azure;

return await Deployment.RunAsync(() => 
{
    // Resource group that holds every resource in this example.
    var exampleResourceGroup = new Azure.Core.ResourceGroup("exampleResourceGroup", new()
    {
        Location = "East US",
    });

    // Data Factory with a system-assigned managed identity; the linked
    // service below authenticates to Databricks with this identity.
    var exampleFactory = new Azure.DataFactory.Factory("exampleFactory", new()
    {
        Location = exampleResourceGroup.Location,
        ResourceGroupName = exampleResourceGroup.Name,
        Identity = new Azure.DataFactory.Inputs.FactoryIdentityArgs
        {
            Type = "SystemAssigned",
        },
    });

    // Create a Databricks workspace to link the Data Factory to.
    var exampleWorkspace = new Azure.DataBricks.Workspace("exampleWorkspace", new()
    {
        ResourceGroupName = exampleResourceGroup.Name,
        Location = exampleResourceGroup.Location,
        Sku = "standard",
    });

    // Linked Service using managed identity (MsiWorkSpaceResourceId) and a
    // new-cluster configuration.
    var msiLinked = new Azure.DataFactory.LinkedServiceAzureDatabricks("msiLinked", new()
    {
        DataFactoryId = exampleFactory.Id,
        Description = "ADB Linked Service via MSI",
        // WorkspaceUrl is an Output, so Apply is used to prepend the scheme.
        AdbDomain = exampleWorkspace.WorkspaceUrl.Apply(workspaceUrl => $"https://{workspaceUrl}"),
        MsiWorkSpaceResourceId = exampleWorkspace.Id,
        NewClusterConfig = new Azure.DataFactory.Inputs.LinkedServiceAzureDatabricksNewClusterConfigArgs
        {
            NodeType = "Standard_NC12",
            ClusterVersion = "5.5.x-gpu-scala2.11",
            MinNumberOfWorkers = 1,
            MaxNumberOfWorkers = 5,
            DriverNodeType = "Standard_NC12",
            LogDestination = "dbfs:/logs",
            CustomTags = 
            {
                { "custom_tag1", "sct_value_1" },
                { "custom_tag2", "sct_value_2" },
            },
            SparkConfig = 
            {
                { "config1", "value1" },
                { "config2", "value2" },
            },
            SparkEnvironmentVariables = 
            {
                { "envVar1", "value1" },
                { "envVar2", "value2" },
            },
            InitScripts = new[]
            {
                "init.sh",
                "init2.sh",
            },
        },
    });

});
package main

import (
	"fmt"

	"github.com/pulumi/pulumi-azure/sdk/v5/go/azure/core"
	"github.com/pulumi/pulumi-azure/sdk/v5/go/azure/databricks"
	"github.com/pulumi/pulumi-azure/sdk/v5/go/azure/datafactory"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		// Resource group that holds every resource in this example.
		exampleResourceGroup, err := core.NewResourceGroup(ctx, "exampleResourceGroup", &core.ResourceGroupArgs{
			Location: pulumi.String("East US"),
		})
		if err != nil {
			return err
		}
		// Data Factory with a system-assigned managed identity; the linked
		// service below authenticates to Databricks with this identity.
		exampleFactory, err := datafactory.NewFactory(ctx, "exampleFactory", &datafactory.FactoryArgs{
			Location:          exampleResourceGroup.Location,
			ResourceGroupName: exampleResourceGroup.Name,
			Identity: &datafactory.FactoryIdentityArgs{
				Type: pulumi.String("SystemAssigned"),
			},
		})
		if err != nil {
			return err
		}
		// Create a Databricks workspace to link the Data Factory to.
		exampleWorkspace, err := databricks.NewWorkspace(ctx, "exampleWorkspace", &databricks.WorkspaceArgs{
			ResourceGroupName: exampleResourceGroup.Name,
			Location:          exampleResourceGroup.Location,
			Sku:               pulumi.String("standard"),
		})
		if err != nil {
			return err
		}
		// Linked Service using managed identity (MsiWorkSpaceResourceId)
		// and a new-cluster configuration.
		_, err = datafactory.NewLinkedServiceAzureDatabricks(ctx, "msiLinked", &datafactory.LinkedServiceAzureDatabricksArgs{
			DataFactoryId: exampleFactory.ID(),
			Description:   pulumi.String("ADB Linked Service via MSI"),
			// WorkspaceUrl is an Output, so ApplyT is used to prepend the scheme.
			AdbDomain: exampleWorkspace.WorkspaceUrl.ApplyT(func(workspaceUrl string) (string, error) {
				return fmt.Sprintf("https://%v", workspaceUrl), nil
			}).(pulumi.StringOutput),
			MsiWorkSpaceResourceId: exampleWorkspace.ID(),
			NewClusterConfig: &datafactory.LinkedServiceAzureDatabricksNewClusterConfigArgs{
				NodeType:           pulumi.String("Standard_NC12"),
				ClusterVersion:     pulumi.String("5.5.x-gpu-scala2.11"),
				MinNumberOfWorkers: pulumi.Int(1),
				MaxNumberOfWorkers: pulumi.Int(5),
				DriverNodeType:     pulumi.String("Standard_NC12"),
				LogDestination:     pulumi.String("dbfs:/logs"),
				CustomTags: pulumi.StringMap{
					"custom_tag1": pulumi.String("sct_value_1"),
					"custom_tag2": pulumi.String("sct_value_2"),
				},
				SparkConfig: pulumi.StringMap{
					"config1": pulumi.String("value1"),
					"config2": pulumi.String("value2"),
				},
				SparkEnvironmentVariables: pulumi.StringMap{
					"envVar1": pulumi.String("value1"),
					"envVar2": pulumi.String("value2"),
				},
				InitScripts: pulumi.StringArray{
					pulumi.String("init.sh"),
					pulumi.String("init2.sh"),
				},
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.azure.core.ResourceGroup;
import com.pulumi.azure.core.ResourceGroupArgs;
import com.pulumi.azure.datafactory.Factory;
import com.pulumi.azure.datafactory.FactoryArgs;
import com.pulumi.azure.datafactory.inputs.FactoryIdentityArgs;
import com.pulumi.azure.databricks.Workspace;
import com.pulumi.azure.databricks.WorkspaceArgs;
import com.pulumi.azure.datafactory.LinkedServiceAzureDatabricks;
import com.pulumi.azure.datafactory.LinkedServiceAzureDatabricksArgs;
import com.pulumi.azure.datafactory.inputs.LinkedServiceAzureDatabricksNewClusterConfigArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        // Resource group that holds every resource in this example.
        var exampleResourceGroup = new ResourceGroup("exampleResourceGroup", ResourceGroupArgs.builder()        
            .location("East US")
            .build());

        // Data Factory with a system-assigned managed identity; the linked
        // service below authenticates to Databricks with this identity.
        var exampleFactory = new Factory("exampleFactory", FactoryArgs.builder()        
            .location(exampleResourceGroup.location())
            .resourceGroupName(exampleResourceGroup.name())
            .identity(FactoryIdentityArgs.builder()
                .type("SystemAssigned")
                .build())
            .build());

        // Create a Databricks workspace to link the Data Factory to.
        var exampleWorkspace = new Workspace("exampleWorkspace", WorkspaceArgs.builder()        
            .resourceGroupName(exampleResourceGroup.name())
            .location(exampleResourceGroup.location())
            .sku("standard")
            .build());

        // Linked Service using managed identity (msiWorkSpaceResourceId)
        // and a new-cluster configuration.
        var msiLinked = new LinkedServiceAzureDatabricks("msiLinked", LinkedServiceAzureDatabricksArgs.builder()        
            .dataFactoryId(exampleFactory.id())
            .description("ADB Linked Service via MSI")
            // workspaceUrl() is an Output, so applyValue is used to prepend the scheme.
            .adbDomain(exampleWorkspace.workspaceUrl().applyValue(workspaceUrl -> String.format("https://%s", workspaceUrl)))
            .msiWorkSpaceResourceId(exampleWorkspace.id())
            .newClusterConfig(LinkedServiceAzureDatabricksNewClusterConfigArgs.builder()
                .nodeType("Standard_NC12")
                .clusterVersion("5.5.x-gpu-scala2.11")
                .minNumberOfWorkers(1)
                .maxNumberOfWorkers(5)
                .driverNodeType("Standard_NC12")
                .logDestination("dbfs:/logs")
                .customTags(Map.ofEntries(
                    Map.entry("custom_tag1", "sct_value_1"),
                    Map.entry("custom_tag2", "sct_value_2")
                ))
                .sparkConfig(Map.ofEntries(
                    Map.entry("config1", "value1"),
                    Map.entry("config2", "value2")
                ))
                .sparkEnvironmentVariables(Map.ofEntries(
                    Map.entry("envVar1", "value1"),
                    Map.entry("envVar2", "value2")
                ))
                .initScripts(                
                    "init.sh",
                    "init2.sh")
                .build())
            .build());

    }
}
import pulumi
import pulumi_azure as azure

# Resource group that holds every resource in this example.
example_resource_group = azure.core.ResourceGroup("exampleResourceGroup", location="East US")
# Data Factory with a system-assigned managed identity; the linked service
# below authenticates to Databricks with this identity.
example_factory = azure.datafactory.Factory("exampleFactory",
    location=example_resource_group.location,
    resource_group_name=example_resource_group.name,
    identity=azure.datafactory.FactoryIdentityArgs(
        type="SystemAssigned",
    ))
# Create a Databricks workspace to link the Data Factory to.
example_workspace = azure.databricks.Workspace("exampleWorkspace",
    resource_group_name=example_resource_group.name,
    location=example_resource_group.location,
    sku="standard")
# Linked Service using managed identity (msi_work_space_resource_id) and a
# new-cluster configuration. workspace_url is an Output, so .apply() is used
# to prepend the scheme.
msi_linked = azure.datafactory.LinkedServiceAzureDatabricks("msiLinked",
    data_factory_id=example_factory.id,
    description="ADB Linked Service via MSI",
    adb_domain=example_workspace.workspace_url.apply(lambda workspace_url: f"https://{workspace_url}"),
    msi_work_space_resource_id=example_workspace.id,
    new_cluster_config=azure.datafactory.LinkedServiceAzureDatabricksNewClusterConfigArgs(
        node_type="Standard_NC12",
        cluster_version="5.5.x-gpu-scala2.11",
        min_number_of_workers=1,
        max_number_of_workers=5,
        driver_node_type="Standard_NC12",
        log_destination="dbfs:/logs",
        custom_tags={
            "custom_tag1": "sct_value_1",
            "custom_tag2": "sct_value_2",
        },
        spark_config={
            "config1": "value1",
            "config2": "value2",
        },
        spark_environment_variables={
            "envVar1": "value1",
            "envVar2": "value2",
        },
        init_scripts=[
            "init.sh",
            "init2.sh",
        ],
    ))
import * as pulumi from "@pulumi/pulumi";
import * as azure from "@pulumi/azure";

// Resource group that holds every resource in this example.
const exampleResourceGroup = new azure.core.ResourceGroup("exampleResourceGroup", {location: "East US"});
// Data Factory with a system-assigned managed identity; the linked service
// below authenticates to Databricks with this identity.
const exampleFactory = new azure.datafactory.Factory("exampleFactory", {
    location: exampleResourceGroup.location,
    resourceGroupName: exampleResourceGroup.name,
    identity: {
        type: "SystemAssigned",
    },
});
// Create a Databricks workspace to link the Data Factory to.
const exampleWorkspace = new azure.databricks.Workspace("exampleWorkspace", {
    resourceGroupName: exampleResourceGroup.name,
    location: exampleResourceGroup.location,
    sku: "standard",
});
// Linked Service using managed identity (msiWorkSpaceResourceId) and a
// new-cluster configuration. workspaceUrl is an Output, so
// pulumi.interpolate is used to prepend the scheme.
const msiLinked = new azure.datafactory.LinkedServiceAzureDatabricks("msiLinked", {
    dataFactoryId: exampleFactory.id,
    description: "ADB Linked Service via MSI",
    adbDomain: pulumi.interpolate`https://${exampleWorkspace.workspaceUrl}`,
    msiWorkSpaceResourceId: exampleWorkspace.id,
    newClusterConfig: {
        nodeType: "Standard_NC12",
        clusterVersion: "5.5.x-gpu-scala2.11",
        minNumberOfWorkers: 1,
        maxNumberOfWorkers: 5,
        driverNodeType: "Standard_NC12",
        logDestination: "dbfs:/logs",
        customTags: {
            custom_tag1: "sct_value_1",
            custom_tag2: "sct_value_2",
        },
        sparkConfig: {
            config1: "value1",
            config2: "value2",
        },
        sparkEnvironmentVariables: {
            envVar1: "value1",
            envVar2: "value2",
        },
        initScripts: [
            "init.sh",
            "init2.sh",
        ],
    },
});
resources:
  # Resource group that holds every resource in this example.
  exampleResourceGroup:
    type: azure:core:ResourceGroup
    properties:
      location: East US
  # Data Factory with a system-assigned managed identity; the linked
  # service below authenticates to Databricks with this identity.
  exampleFactory:
    type: azure:datafactory:Factory
    properties:
      location: ${exampleResourceGroup.location}
      resourceGroupName: ${exampleResourceGroup.name}
      identity:
        type: SystemAssigned
  # Create a Databricks workspace to link the Data Factory to.
  exampleWorkspace:
    type: azure:databricks:Workspace
    properties:
      resourceGroupName: ${exampleResourceGroup.name}
      location: ${exampleResourceGroup.location}
      sku: standard
  # Linked Service using managed identity (msiWorkSpaceResourceId) and a
  # new-cluster configuration.
  msiLinked:
    type: azure:datafactory:LinkedServiceAzureDatabricks
    properties:
      dataFactoryId: ${exampleFactory.id}
      description: ADB Linked Service via MSI
      adbDomain: https://${exampleWorkspace.workspaceUrl}
      msiWorkSpaceResourceId: ${exampleWorkspace.id}
      newClusterConfig:
        nodeType: Standard_NC12
        clusterVersion: 5.5.x-gpu-scala2.11
        minNumberOfWorkers: 1
        maxNumberOfWorkers: 5
        driverNodeType: Standard_NC12
        logDestination: dbfs:/logs
        customTags:
          custom_tag1: sct_value_1
          custom_tag2: sct_value_2
        sparkConfig:
          config1: value1
          config2: value2
        sparkEnvironmentVariables:
          envVar1: value1
          envVar2: value2
        initScripts:
          - init.sh
          - init2.sh

With Access Token & Existing Cluster

using System.Collections.Generic;
using Pulumi;
using Azure = Pulumi.Azure;

return await Deployment.RunAsync(() => 
{
    // Resource group that holds every resource in this example.
    var exampleResourceGroup = new Azure.Core.ResourceGroup("exampleResourceGroup", new()
    {
        Location = "East US",
    });

    // Data Factory instance (no managed identity needed here: the linked
    // service below authenticates with an access token).
    var exampleFactory = new Azure.DataFactory.Factory("exampleFactory", new()
    {
        Location = exampleResourceGroup.Location,
        ResourceGroupName = exampleResourceGroup.Name,
    });

    // Create a Databricks workspace to link the Data Factory to.
    var exampleWorkspace = new Azure.DataBricks.Workspace("exampleWorkspace", new()
    {
        ResourceGroupName = exampleResourceGroup.Name,
        Location = exampleResourceGroup.Location,
        Sku = "standard",
    });

    // Linked Service that targets an existing cluster and authenticates
    // with a Databricks access token.
    var atLinked = new Azure.DataFactory.LinkedServiceAzureDatabricks("atLinked", new()
    {
        DataFactoryId = exampleFactory.Id,
        Description = "ADB Linked Service via Access Token",
        ExistingClusterId = "0308-201146-sly615",
        AccessToken = "SomeDatabricksAccessToken",
        // WorkspaceUrl is an Output, so Apply is used to prepend the scheme.
        AdbDomain = exampleWorkspace.WorkspaceUrl.Apply(workspaceUrl => $"https://{workspaceUrl}"),
    });

});
package main

import (
	"fmt"

	"github.com/pulumi/pulumi-azure/sdk/v5/go/azure/core"
	"github.com/pulumi/pulumi-azure/sdk/v5/go/azure/databricks"
	"github.com/pulumi/pulumi-azure/sdk/v5/go/azure/datafactory"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		// Resource group that holds every resource in this example.
		exampleResourceGroup, err := core.NewResourceGroup(ctx, "exampleResourceGroup", &core.ResourceGroupArgs{
			Location: pulumi.String("East US"),
		})
		if err != nil {
			return err
		}
		// Data Factory instance (no managed identity needed here: the
		// linked service below authenticates with an access token).
		exampleFactory, err := datafactory.NewFactory(ctx, "exampleFactory", &datafactory.FactoryArgs{
			Location:          exampleResourceGroup.Location,
			ResourceGroupName: exampleResourceGroup.Name,
		})
		if err != nil {
			return err
		}
		// Create a Databricks workspace to link the Data Factory to.
		exampleWorkspace, err := databricks.NewWorkspace(ctx, "exampleWorkspace", &databricks.WorkspaceArgs{
			ResourceGroupName: exampleResourceGroup.Name,
			Location:          exampleResourceGroup.Location,
			Sku:               pulumi.String("standard"),
		})
		if err != nil {
			return err
		}
		// Linked Service that targets an existing cluster and authenticates
		// with a Databricks access token.
		_, err = datafactory.NewLinkedServiceAzureDatabricks(ctx, "atLinked", &datafactory.LinkedServiceAzureDatabricksArgs{
			DataFactoryId:     exampleFactory.ID(),
			Description:       pulumi.String("ADB Linked Service via Access Token"),
			ExistingClusterId: pulumi.String("0308-201146-sly615"),
			AccessToken:       pulumi.String("SomeDatabricksAccessToken"),
			// WorkspaceUrl is an Output, so ApplyT is used to prepend the scheme.
			AdbDomain: exampleWorkspace.WorkspaceUrl.ApplyT(func(workspaceUrl string) (string, error) {
				return fmt.Sprintf("https://%v", workspaceUrl), nil
			}).(pulumi.StringOutput),
		})
		if err != nil {
			return err
		}
		return nil
	})
}
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.azure.core.ResourceGroup;
import com.pulumi.azure.core.ResourceGroupArgs;
import com.pulumi.azure.datafactory.Factory;
import com.pulumi.azure.datafactory.FactoryArgs;
import com.pulumi.azure.databricks.Workspace;
import com.pulumi.azure.databricks.WorkspaceArgs;
import com.pulumi.azure.datafactory.LinkedServiceAzureDatabricks;
import com.pulumi.azure.datafactory.LinkedServiceAzureDatabricksArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        // Resource group that holds every resource in this example.
        var exampleResourceGroup = new ResourceGroup("exampleResourceGroup", ResourceGroupArgs.builder()        
            .location("East US")
            .build());

        // Data Factory instance (no managed identity needed here: the
        // linked service below authenticates with an access token).
        var exampleFactory = new Factory("exampleFactory", FactoryArgs.builder()        
            .location(exampleResourceGroup.location())
            .resourceGroupName(exampleResourceGroup.name())
            .build());

        // Create a Databricks workspace to link the Data Factory to.
        var exampleWorkspace = new Workspace("exampleWorkspace", WorkspaceArgs.builder()        
            .resourceGroupName(exampleResourceGroup.name())
            .location(exampleResourceGroup.location())
            .sku("standard")
            .build());

        // Linked Service that targets an existing cluster and authenticates
        // with a Databricks access token.
        var atLinked = new LinkedServiceAzureDatabricks("atLinked", LinkedServiceAzureDatabricksArgs.builder()        
            .dataFactoryId(exampleFactory.id())
            .description("ADB Linked Service via Access Token")
            .existingClusterId("0308-201146-sly615")
            .accessToken("SomeDatabricksAccessToken")
            // workspaceUrl() is an Output, so applyValue is used to prepend the scheme.
            .adbDomain(exampleWorkspace.workspaceUrl().applyValue(workspaceUrl -> String.format("https://%s", workspaceUrl)))
            .build());

    }
}
import pulumi
import pulumi_azure as azure

# Resource group that holds every resource in this example.
example_resource_group = azure.core.ResourceGroup("exampleResourceGroup", location="East US")
# Data Factory instance (no managed identity needed here: the linked
# service below authenticates with an access token).
example_factory = azure.datafactory.Factory("exampleFactory",
    location=example_resource_group.location,
    resource_group_name=example_resource_group.name)
# Create a Databricks workspace to link the Data Factory to.
example_workspace = azure.databricks.Workspace("exampleWorkspace",
    resource_group_name=example_resource_group.name,
    location=example_resource_group.location,
    sku="standard")
# Linked Service that targets an existing cluster and authenticates with a
# Databricks access token. workspace_url is an Output, so .apply() is used
# to prepend the scheme.
at_linked = azure.datafactory.LinkedServiceAzureDatabricks("atLinked",
    data_factory_id=example_factory.id,
    description="ADB Linked Service via Access Token",
    existing_cluster_id="0308-201146-sly615",
    access_token="SomeDatabricksAccessToken",
    adb_domain=example_workspace.workspace_url.apply(lambda workspace_url: f"https://{workspace_url}"))
import * as pulumi from "@pulumi/pulumi";
import * as azure from "@pulumi/azure";

// Resource group that holds every resource in this example.
const exampleResourceGroup = new azure.core.ResourceGroup("exampleResourceGroup", {location: "East US"});
// Data Factory instance (no managed identity needed here: the linked
// service below authenticates with an access token).
const exampleFactory = new azure.datafactory.Factory("exampleFactory", {
    location: exampleResourceGroup.location,
    resourceGroupName: exampleResourceGroup.name,
});
// Create a Databricks workspace to link the Data Factory to.
const exampleWorkspace = new azure.databricks.Workspace("exampleWorkspace", {
    resourceGroupName: exampleResourceGroup.name,
    location: exampleResourceGroup.location,
    sku: "standard",
});
// Linked Service that targets an existing cluster and authenticates with a
// Databricks access token. workspaceUrl is an Output, so pulumi.interpolate
// is used to prepend the scheme.
const atLinked = new azure.datafactory.LinkedServiceAzureDatabricks("atLinked", {
    dataFactoryId: exampleFactory.id,
    description: "ADB Linked Service via Access Token",
    existingClusterId: "0308-201146-sly615",
    accessToken: "SomeDatabricksAccessToken",
    adbDomain: pulumi.interpolate`https://${exampleWorkspace.workspaceUrl}`,
});
resources:
  # Resource group that holds every resource in this example.
  exampleResourceGroup:
    type: azure:core:ResourceGroup
    properties:
      location: East US
  # Data Factory instance (no managed identity needed here: the linked
  # service below authenticates with an access token).
  exampleFactory:
    type: azure:datafactory:Factory
    properties:
      location: ${exampleResourceGroup.location}
      resourceGroupName: ${exampleResourceGroup.name}
  # Create a Databricks workspace to link the Data Factory to.
  exampleWorkspace:
    type: azure:databricks:Workspace
    properties:
      resourceGroupName: ${exampleResourceGroup.name}
      location: ${exampleResourceGroup.location}
      sku: standard
  # Linked Service that targets an existing cluster and authenticates with
  # a Databricks access token.
  atLinked:
    type: azure:datafactory:LinkedServiceAzureDatabricks
    properties:
      dataFactoryId: ${exampleFactory.id}
      description: ADB Linked Service via Access Token
      existingClusterId: 0308-201146-sly615
      accessToken: SomeDatabricksAccessToken
      adbDomain: https://${exampleWorkspace.workspaceUrl}

Create LinkedServiceAzureDatabricks Resource

new LinkedServiceAzureDatabricks(name: string, args: LinkedServiceAzureDatabricksArgs, opts?: CustomResourceOptions);
@overload
def LinkedServiceAzureDatabricks(resource_name: str,
                                 opts: Optional[ResourceOptions] = None,
                                 access_token: Optional[str] = None,
                                 adb_domain: Optional[str] = None,
                                 additional_properties: Optional[Mapping[str, str]] = None,
                                 annotations: Optional[Sequence[str]] = None,
                                 data_factory_id: Optional[str] = None,
                                 description: Optional[str] = None,
                                 existing_cluster_id: Optional[str] = None,
                                 instance_pool: Optional[LinkedServiceAzureDatabricksInstancePoolArgs] = None,
                                 integration_runtime_name: Optional[str] = None,
                                 key_vault_password: Optional[LinkedServiceAzureDatabricksKeyVaultPasswordArgs] = None,
                                 msi_work_space_resource_id: Optional[str] = None,
                                 name: Optional[str] = None,
                                 new_cluster_config: Optional[LinkedServiceAzureDatabricksNewClusterConfigArgs] = None,
                                 parameters: Optional[Mapping[str, str]] = None)
@overload
def LinkedServiceAzureDatabricks(resource_name: str,
                                 args: LinkedServiceAzureDatabricksArgs,
                                 opts: Optional[ResourceOptions] = None)
func NewLinkedServiceAzureDatabricks(ctx *Context, name string, args LinkedServiceAzureDatabricksArgs, opts ...ResourceOption) (*LinkedServiceAzureDatabricks, error)
public LinkedServiceAzureDatabricks(string name, LinkedServiceAzureDatabricksArgs args, CustomResourceOptions? opts = null)
public LinkedServiceAzureDatabricks(String name, LinkedServiceAzureDatabricksArgs args)
public LinkedServiceAzureDatabricks(String name, LinkedServiceAzureDatabricksArgs args, CustomResourceOptions options)
type: azure:datafactory:LinkedServiceAzureDatabricks
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.

name string
The unique name of the resource.
args LinkedServiceAzureDatabricksArgs
The arguments to resource properties.
opts CustomResourceOptions
Bag of options to control resource's behavior.
resource_name str
The unique name of the resource.
args LinkedServiceAzureDatabricksArgs
The arguments to resource properties.
opts ResourceOptions
Bag of options to control resource's behavior.
ctx Context
Context object for the current deployment.
name string
The unique name of the resource.
args LinkedServiceAzureDatabricksArgs
The arguments to resource properties.
opts ResourceOption
Bag of options to control resource's behavior.
name string
The unique name of the resource.
args LinkedServiceAzureDatabricksArgs
The arguments to resource properties.
opts CustomResourceOptions
Bag of options to control resource's behavior.
name String
The unique name of the resource.
args LinkedServiceAzureDatabricksArgs
The arguments to resource properties.
options CustomResourceOptions
Bag of options to control resource's behavior.

LinkedServiceAzureDatabricks Resource Properties

To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

Inputs

The LinkedServiceAzureDatabricks resource accepts the following input properties:

AdbDomain string

The domain URL of the databricks instance.

DataFactoryId string

The ID of the Data Factory with which to associate the Linked Service. Changing this forces a new resource.

AccessToken string

Authenticate to ADB via an access token.

AdditionalProperties Dictionary<string, string>

A map of additional properties to associate with the Data Factory Linked Service.

Annotations List<string>

List of tags that can be used for describing the Data Factory Linked Service.

Description string

The description for the Data Factory Linked Service.

ExistingClusterId string

The cluster_id of an existing cluster within the linked ADB instance.

InstancePool LinkedServiceAzureDatabricksInstancePoolArgs

Leverages an instance pool within the linked ADB instance as defined by instance_pool block below.

IntegrationRuntimeName string

The integration runtime reference to associate with the Data Factory Linked Service.

KeyVaultPassword LinkedServiceAzureDatabricksKeyVaultPasswordArgs

Authenticate to ADB via Azure Key Vault Linked Service as defined in the key_vault_password block below.

MsiWorkSpaceResourceId string

Authenticate to ADB via managed service identity.

Name string

Specifies the name of the Data Factory Linked Service. Changing this forces a new resource to be created. Must be unique within a data factory. See the Microsoft documentation for all restrictions.

NewClusterConfig LinkedServiceAzureDatabricksNewClusterConfigArgs

Creates new clusters within the linked ADB instance as defined in the new_cluster_config block below.

Parameters Dictionary<string, string>

A map of parameters to associate with the Data Factory Linked Service.

AdbDomain string

The domain URL of the databricks instance.

DataFactoryId string

The ID of the Data Factory with which to associate the Linked Service. Changing this forces a new resource.

AccessToken string

Authenticate to ADB via an access token.

AdditionalProperties map[string]string

A map of additional properties to associate with the Data Factory Linked Service.

Annotations []string

List of tags that can be used for describing the Data Factory Linked Service.

Description string

The description for the Data Factory Linked Service.

ExistingClusterId string

The cluster_id of an existing cluster within the linked ADB instance.

InstancePool LinkedServiceAzureDatabricksInstancePoolArgs

Leverages an instance pool within the linked ADB instance as defined by instance_pool block below.

IntegrationRuntimeName string

The integration runtime reference to associate with the Data Factory Linked Service.

KeyVaultPassword LinkedServiceAzureDatabricksKeyVaultPasswordArgs

Authenticate to ADB via Azure Key Vault Linked Service as defined in the key_vault_password block below.

MsiWorkSpaceResourceId string

Authenticate to ADB via managed service identity.

Name string

Specifies the name of the Data Factory Linked Service. Changing this forces a new resource to be created. Must be unique within a data factory. See the Microsoft documentation for all restrictions.

NewClusterConfig LinkedServiceAzureDatabricksNewClusterConfigArgs

Creates new clusters within the linked ADB instance as defined in the new_cluster_config block below.

Parameters map[string]string

A map of parameters to associate with the Data Factory Linked Service.

adbDomain String

The domain URL of the databricks instance.

dataFactoryId String

The ID of the Data Factory with which to associate the Linked Service. Changing this forces a new resource.

accessToken String

Authenticate to ADB via an access token.

additionalProperties Map<String,String>

A map of additional properties to associate with the Data Factory Linked Service.

annotations List<String>

List of tags that can be used for describing the Data Factory Linked Service.

description String

The description for the Data Factory Linked Service.

existingClusterId String

The cluster_id of an existing cluster within the linked ADB instance.

instancePool LinkedServiceAzureDatabricksInstancePoolArgs

Leverages an instance pool within the linked ADB instance as defined by instance_pool block below.

integrationRuntimeName String

The integration runtime reference to associate with the Data Factory Linked Service.

keyVaultPassword LinkedServiceAzureDatabricksKeyVaultPasswordArgs

Authenticate to ADB via Azure Key Vault Linked Service as defined in the key_vault_password block below.

msiWorkSpaceResourceId String

Authenticate to ADB via managed service identity.

name String

Specifies the name of the Data Factory Linked Service. Changing this forces a new resource to be created. Must be unique within a data factory. See the Microsoft documentation for all restrictions.

newClusterConfig LinkedServiceAzureDatabricksNewClusterConfigArgs

Creates new clusters within the linked ADB instance as defined in the new_cluster_config block below.

parameters Map<String,String>

A map of parameters to associate with the Data Factory Linked Service.

adbDomain string

The domain URL of the databricks instance.

dataFactoryId string

The ID of the Data Factory with which to associate the Linked Service. Changing this forces a new resource.

accessToken string

Authenticate to ADB via an access token.

additionalProperties {[key: string]: string}

A map of additional properties to associate with the Data Factory Linked Service.

annotations string[]

List of tags that can be used for describing the Data Factory Linked Service.

description string

The description for the Data Factory Linked Service.

existingClusterId string

The cluster_id of an existing cluster within the linked ADB instance.

instancePool LinkedServiceAzureDatabricksInstancePoolArgs

Leverages an instance pool within the linked ADB instance as defined by instance_pool block below.

integrationRuntimeName string

The integration runtime reference to associate with the Data Factory Linked Service.

keyVaultPassword LinkedServiceAzureDatabricksKeyVaultPasswordArgs

Authenticate to ADB via Azure Key Vault Linked Service as defined in the key_vault_password block below.

msiWorkSpaceResourceId string

Authenticate to ADB via managed service identity.

name string

Specifies the name of the Data Factory Linked Service. Changing this forces a new resource to be created. Must be unique within a data factory. See the Microsoft documentation for all restrictions.

newClusterConfig LinkedServiceAzureDatabricksNewClusterConfigArgs

Creates new clusters within the linked ADB instance as defined in the new_cluster_config block below.

parameters {[key: string]: string}

A map of parameters to associate with the Data Factory Linked Service.

adb_domain str

The domain URL of the databricks instance.

data_factory_id str

The ID of the Data Factory with which to associate the Linked Service. Changing this forces a new resource.

access_token str

Authenticate to ADB via an access token.

additional_properties Mapping[str, str]

A map of additional properties to associate with the Data Factory Linked Service.

annotations Sequence[str]

List of tags that can be used for describing the Data Factory Linked Service.

description str

The description for the Data Factory Linked Service.

existing_cluster_id str

The cluster_id of an existing cluster within the linked ADB instance.

instance_pool LinkedServiceAzureDatabricksInstancePoolArgs

Leverages an instance pool within the linked ADB instance as defined by instance_pool block below.

integration_runtime_name str

The integration runtime reference to associate with the Data Factory Linked Service.

key_vault_password LinkedServiceAzureDatabricksKeyVaultPasswordArgs

Authenticate to ADB via Azure Key Vault Linked Service as defined in the key_vault_password block below.

msi_work_space_resource_id str

Authenticate to ADB via managed service identity.

name str

Specifies the name of the Data Factory Linked Service. Changing this forces a new resource to be created. Must be unique within a data factory. See the Microsoft documentation for all restrictions.

new_cluster_config LinkedServiceAzureDatabricksNewClusterConfigArgs

Creates new clusters within the linked ADB instance as defined in the new_cluster_config block below.

parameters Mapping[str, str]

A map of parameters to associate with the Data Factory Linked Service.

adbDomain String

The domain URL of the databricks instance.

dataFactoryId String

The ID of the Data Factory with which to associate the Linked Service. Changing this forces a new resource.

accessToken String

Authenticate to ADB via an access token.

additionalProperties Map<String>

A map of additional properties to associate with the Data Factory Linked Service.

annotations List<String>

List of tags that can be used for describing the Data Factory Linked Service.

description String

The description for the Data Factory Linked Service.

existingClusterId String

The cluster_id of an existing cluster within the linked ADB instance.

instancePool Property Map

Leverages an instance pool within the linked ADB instance as defined by instance_pool block below.

integrationRuntimeName String

The integration runtime reference to associate with the Data Factory Linked Service.

keyVaultPassword Property Map

Authenticate to ADB via Azure Key Vault Linked Service as defined in the key_vault_password block below.

msiWorkSpaceResourceId String

Authenticate to ADB via managed service identity.

name String

Specifies the name of the Data Factory Linked Service. Changing this forces a new resource to be created. Must be unique within a data factory. See the Microsoft documentation for all restrictions.

newClusterConfig Property Map

Creates new clusters within the linked ADB instance as defined in the new_cluster_config block below.

parameters Map<String>

A map of parameters to associate with the Data Factory Linked Service.

Outputs

All input properties are implicitly available as output properties. Additionally, the LinkedServiceAzureDatabricks resource produces the following output properties:

Id string

The provider-assigned unique ID for this managed resource.

Id string

The provider-assigned unique ID for this managed resource.

id String

The provider-assigned unique ID for this managed resource.

id string

The provider-assigned unique ID for this managed resource.

id str

The provider-assigned unique ID for this managed resource.

id String

The provider-assigned unique ID for this managed resource.

Look up Existing LinkedServiceAzureDatabricks Resource

Get an existing LinkedServiceAzureDatabricks resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

public static get(name: string, id: Input<ID>, state?: LinkedServiceAzureDatabricksState, opts?: CustomResourceOptions): LinkedServiceAzureDatabricks
@staticmethod
def get(resource_name: str,
        id: str,
        opts: Optional[ResourceOptions] = None,
        access_token: Optional[str] = None,
        adb_domain: Optional[str] = None,
        additional_properties: Optional[Mapping[str, str]] = None,
        annotations: Optional[Sequence[str]] = None,
        data_factory_id: Optional[str] = None,
        description: Optional[str] = None,
        existing_cluster_id: Optional[str] = None,
        instance_pool: Optional[LinkedServiceAzureDatabricksInstancePoolArgs] = None,
        integration_runtime_name: Optional[str] = None,
        key_vault_password: Optional[LinkedServiceAzureDatabricksKeyVaultPasswordArgs] = None,
        msi_work_space_resource_id: Optional[str] = None,
        name: Optional[str] = None,
        new_cluster_config: Optional[LinkedServiceAzureDatabricksNewClusterConfigArgs] = None,
        parameters: Optional[Mapping[str, str]] = None) -> LinkedServiceAzureDatabricks
func GetLinkedServiceAzureDatabricks(ctx *Context, name string, id IDInput, state *LinkedServiceAzureDatabricksState, opts ...ResourceOption) (*LinkedServiceAzureDatabricks, error)
public static LinkedServiceAzureDatabricks Get(string name, Input<string> id, LinkedServiceAzureDatabricksState? state, CustomResourceOptions? opts = null)
public static LinkedServiceAzureDatabricks get(String name, Output<String> id, LinkedServiceAzureDatabricksState state, CustomResourceOptions options)
Resource lookup is not supported in YAML
name
The unique name of the resulting resource.
id
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
resource_name
The unique name of the resulting resource.
id
The unique provider ID of the resource to lookup.
name
The unique name of the resulting resource.
id
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
name
The unique name of the resulting resource.
id
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
name
The unique name of the resulting resource.
id
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
The following state arguments are supported:
AccessToken string

Authenticate to ADB via an access token.

AdbDomain string

The domain URL of the databricks instance.

AdditionalProperties Dictionary<string, string>

A map of additional properties to associate with the Data Factory Linked Service.

Annotations List<string>

List of tags that can be used for describing the Data Factory Linked Service.

DataFactoryId string

The ID of the Data Factory with which to associate the Linked Service. Changing this forces a new resource.

Description string

The description for the Data Factory Linked Service.

ExistingClusterId string

The cluster_id of an existing cluster within the linked ADB instance.

InstancePool LinkedServiceAzureDatabricksInstancePoolArgs

Leverages an instance pool within the linked ADB instance as defined by instance_pool block below.

IntegrationRuntimeName string

The integration runtime reference to associate with the Data Factory Linked Service.

KeyVaultPassword LinkedServiceAzureDatabricksKeyVaultPasswordArgs

Authenticate to ADB via Azure Key Vault Linked Service as defined in the key_vault_password block below.

MsiWorkSpaceResourceId string

Authenticate to ADB via managed service identity.

Name string

Specifies the name of the Data Factory Linked Service. Changing this forces a new resource to be created. Must be unique within a data factory. See the Microsoft documentation for all restrictions.

NewClusterConfig LinkedServiceAzureDatabricksNewClusterConfigArgs

Creates new clusters within the linked ADB instance as defined in the new_cluster_config block below.

Parameters Dictionary<string, string>

A map of parameters to associate with the Data Factory Linked Service.

AccessToken string

Authenticate to ADB via an access token.

AdbDomain string

The domain URL of the databricks instance.

AdditionalProperties map[string]string

A map of additional properties to associate with the Data Factory Linked Service.

Annotations []string

List of tags that can be used for describing the Data Factory Linked Service.

DataFactoryId string

The ID of the Data Factory with which to associate the Linked Service. Changing this forces a new resource.

Description string

The description for the Data Factory Linked Service.

ExistingClusterId string

The cluster_id of an existing cluster within the linked ADB instance.

InstancePool LinkedServiceAzureDatabricksInstancePoolArgs

Leverages an instance pool within the linked ADB instance as defined by instance_pool block below.

IntegrationRuntimeName string

The integration runtime reference to associate with the Data Factory Linked Service.

KeyVaultPassword LinkedServiceAzureDatabricksKeyVaultPasswordArgs

Authenticate to ADB via Azure Key Vault Linked Service as defined in the key_vault_password block below.

MsiWorkSpaceResourceId string

Authenticate to ADB via managed service identity.

Name string

Specifies the name of the Data Factory Linked Service. Changing this forces a new resource to be created. Must be unique within a data factory. See the Microsoft documentation for all restrictions.

NewClusterConfig LinkedServiceAzureDatabricksNewClusterConfigArgs

Creates new clusters within the linked ADB instance as defined in the new_cluster_config block below.

Parameters map[string]string

A map of parameters to associate with the Data Factory Linked Service.

accessToken String

Authenticate to ADB via an access token.

adbDomain String

The domain URL of the databricks instance.

additionalProperties Map<String,String>

A map of additional properties to associate with the Data Factory Linked Service.

annotations List<String>

List of tags that can be used for describing the Data Factory Linked Service.

dataFactoryId String

The ID of the Data Factory with which to associate the Linked Service. Changing this forces a new resource.

description String

The description for the Data Factory Linked Service.

existingClusterId String

The cluster_id of an existing cluster within the linked ADB instance.

instancePool LinkedServiceAzureDatabricksInstancePoolArgs

Leverages an instance pool within the linked ADB instance as defined by instance_pool block below.

integrationRuntimeName String

The integration runtime reference to associate with the Data Factory Linked Service.

keyVaultPassword LinkedServiceAzureDatabricksKeyVaultPasswordArgs

Authenticate to ADB via Azure Key Vault Linked Service as defined in the key_vault_password block below.

msiWorkSpaceResourceId String

Authenticate to ADB via managed service identity.

name String

Specifies the name of the Data Factory Linked Service. Changing this forces a new resource to be created. Must be unique within a data factory. See the Microsoft documentation for all restrictions.

newClusterConfig LinkedServiceAzureDatabricksNewClusterConfigArgs

Creates new clusters within the linked ADB instance as defined in the new_cluster_config block below.

parameters Map<String,String>

A map of parameters to associate with the Data Factory Linked Service.

accessToken string

Authenticate to ADB via an access token.

adbDomain string

The domain URL of the databricks instance.

additionalProperties {[key: string]: string}

A map of additional properties to associate with the Data Factory Linked Service.

annotations string[]

List of tags that can be used for describing the Data Factory Linked Service.

dataFactoryId string

The ID of the Data Factory with which to associate the Linked Service. Changing this forces a new resource.

description string

The description for the Data Factory Linked Service.

existingClusterId string

The cluster_id of an existing cluster within the linked ADB instance.

instancePool LinkedServiceAzureDatabricksInstancePoolArgs

Leverages an instance pool within the linked ADB instance as defined by instance_pool block below.

integrationRuntimeName string

The integration runtime reference to associate with the Data Factory Linked Service.

keyVaultPassword LinkedServiceAzureDatabricksKeyVaultPasswordArgs

Authenticate to ADB via Azure Key Vault Linked Service as defined in the key_vault_password block below.

msiWorkSpaceResourceId string

Authenticate to ADB via managed service identity.

name string

Specifies the name of the Data Factory Linked Service. Changing this forces a new resource to be created. Must be unique within a data factory. See the Microsoft documentation for all restrictions.

newClusterConfig LinkedServiceAzureDatabricksNewClusterConfigArgs

Creates new clusters within the linked ADB instance as defined in the new_cluster_config block below.

parameters {[key: string]: string}

A map of parameters to associate with the Data Factory Linked Service.

access_token str

Authenticate to ADB via an access token.

adb_domain str

The domain URL of the databricks instance.

additional_properties Mapping[str, str]

A map of additional properties to associate with the Data Factory Linked Service.

annotations Sequence[str]

List of tags that can be used for describing the Data Factory Linked Service.

data_factory_id str

The ID of the Data Factory with which to associate the Linked Service. Changing this forces a new resource.

description str

The description for the Data Factory Linked Service.

existing_cluster_id str

The cluster_id of an existing cluster within the linked ADB instance.

instance_pool LinkedServiceAzureDatabricksInstancePoolArgs

Leverages an instance pool within the linked ADB instance as defined by instance_pool block below.

integration_runtime_name str

The integration runtime reference to associate with the Data Factory Linked Service.

key_vault_password LinkedServiceAzureDatabricksKeyVaultPasswordArgs

Authenticate to ADB via Azure Key Vault Linked Service as defined in the key_vault_password block below.

msi_work_space_resource_id str

Authenticate to ADB via managed service identity.

name str

Specifies the name of the Data Factory Linked Service. Changing this forces a new resource to be created. Must be unique within a data factory. See the Microsoft documentation for all restrictions.

new_cluster_config LinkedServiceAzureDatabricksNewClusterConfigArgs

Creates new clusters within the linked ADB instance as defined in the new_cluster_config block below.

parameters Mapping[str, str]

A map of parameters to associate with the Data Factory Linked Service.

accessToken String

Authenticate to ADB via an access token.

adbDomain String

The domain URL of the databricks instance.

additionalProperties Map<String>

A map of additional properties to associate with the Data Factory Linked Service.

annotations List<String>

List of tags that can be used for describing the Data Factory Linked Service.

dataFactoryId String

The ID of the Data Factory with which to associate the Linked Service. Changing this forces a new resource.

description String

The description for the Data Factory Linked Service.

existingClusterId String

The cluster_id of an existing cluster within the linked ADB instance.

instancePool Property Map

Leverages an instance pool within the linked ADB instance as defined by instance_pool block below.

integrationRuntimeName String

The integration runtime reference to associate with the Data Factory Linked Service.

keyVaultPassword Property Map

Authenticate to ADB via Azure Key Vault Linked Service as defined in the key_vault_password block below.

msiWorkSpaceResourceId String

Authenticate to ADB via managed service identity.

name String

Specifies the name of the Data Factory Linked Service. Changing this forces a new resource to be created. Must be unique within a data factory. See the Microsoft documentation for all restrictions.

newClusterConfig Property Map

Creates new clusters within the linked ADB instance as defined in the new_cluster_config block below.

parameters Map<String>

A map of parameters to associate with the Data Factory Linked Service.

Supporting Types

LinkedServiceAzureDatabricksInstancePool

ClusterVersion string

Spark version of the cluster.

InstancePoolId string

Identifier of the instance pool within the linked ADB instance.

MaxNumberOfWorkers int

The max number of worker nodes. Set this value if you want to enable autoscaling between the min_number_of_workers and this value. Omit this value to use a fixed number of workers defined in the min_number_of_workers property.

MinNumberOfWorkers int

The minimum number of worker nodes. Defaults to 1.

ClusterVersion string

Spark version of the cluster.

InstancePoolId string

Identifier of the instance pool within the linked ADB instance.

MaxNumberOfWorkers int

The max number of worker nodes. Set this value if you want to enable autoscaling between the min_number_of_workers and this value. Omit this value to use a fixed number of workers defined in the min_number_of_workers property.

MinNumberOfWorkers int

The minimum number of worker nodes. Defaults to 1.

clusterVersion String

Spark version of the cluster.

instancePoolId String

Identifier of the instance pool within the linked ADB instance.

maxNumberOfWorkers Integer

The max number of worker nodes. Set this value if you want to enable autoscaling between the min_number_of_workers and this value. Omit this value to use a fixed number of workers defined in the min_number_of_workers property.

minNumberOfWorkers Integer

The minimum number of worker nodes. Defaults to 1.

clusterVersion string

Spark version of the cluster.

instancePoolId string

Identifier of the instance pool within the linked ADB instance.

maxNumberOfWorkers number

The max number of worker nodes. Set this value if you want to enable autoscaling between the min_number_of_workers and this value. Omit this value to use a fixed number of workers defined in the min_number_of_workers property.

minNumberOfWorkers number

The minimum number of worker nodes. Defaults to 1.

cluster_version str

Spark version of the cluster.

instance_pool_id str

Identifier of the instance pool within the linked ADB instance.

max_number_of_workers int

The max number of worker nodes. Set this value if you want to enable autoscaling between the min_number_of_workers and this value. Omit this value to use a fixed number of workers defined in the min_number_of_workers property.

min_number_of_workers int

The minimum number of worker nodes. Defaults to 1.

clusterVersion String

Spark version of the cluster.

instancePoolId String

Identifier of the instance pool within the linked ADB instance.

maxNumberOfWorkers Number

The max number of worker nodes. Set this value if you want to enable autoscaling between the min_number_of_workers and this value. Omit this value to use a fixed number of workers defined in the min_number_of_workers property.

minNumberOfWorkers Number

The minimum number of worker nodes. Defaults to 1.

LinkedServiceAzureDatabricksKeyVaultPassword

LinkedServiceName string

Specifies the name of an existing Key Vault Data Factory Linked Service.

SecretName string

Specifies the secret name in Azure Key Vault that stores ADB access token.

LinkedServiceName string

Specifies the name of an existing Key Vault Data Factory Linked Service.

SecretName string

Specifies the secret name in Azure Key Vault that stores ADB access token.

linkedServiceName String

Specifies the name of an existing Key Vault Data Factory Linked Service.

secretName String

Specifies the secret name in Azure Key Vault that stores ADB access token.

linkedServiceName string

Specifies the name of an existing Key Vault Data Factory Linked Service.

secretName string

Specifies the secret name in Azure Key Vault that stores ADB access token.

linked_service_name str

Specifies the name of an existing Key Vault Data Factory Linked Service.

secret_name str

Specifies the secret name in Azure Key Vault that stores ADB access token.

linkedServiceName String

Specifies the name of an existing Key Vault Data Factory Linked Service.

secretName String

Specifies the secret name in Azure Key Vault that stores ADB access token.

LinkedServiceAzureDatabricksNewClusterConfig

ClusterVersion string

Spark version of the cluster.

NodeType string

Node type for the new cluster.

CustomTags Dictionary<string, string>

Tags for the cluster resource.

DriverNodeType string

Driver node type for the cluster.

InitScripts List<string>

User defined initialization scripts for the cluster.

LogDestination string

Location to deliver Spark driver, worker, and event logs.

MaxNumberOfWorkers int

The max number of worker nodes. Set this value if you want to enable autoscaling between the min_number_of_workers and this value. Omit this value to use a fixed number of workers defined in the min_number_of_workers property.

MinNumberOfWorkers int

The minimum number of worker nodes. Defaults to 1.

SparkConfig Dictionary<string, string>

User-specified Spark configuration variables key-value pairs.

SparkEnvironmentVariables Dictionary<string, string>

User-specified Spark environment variables key-value pairs.

ClusterVersion string

Spark version of the cluster.

NodeType string

Node type for the new cluster.

CustomTags map[string]string

Tags for the cluster resource.

DriverNodeType string

Driver node type for the cluster.

InitScripts []string

User defined initialization scripts for the cluster.

LogDestination string

Location to deliver Spark driver, worker, and event logs.

MaxNumberOfWorkers int

The max number of worker nodes. Set this value if you want to enable autoscaling between the min_number_of_workers and this value. Omit this value to use a fixed number of workers defined in the min_number_of_workers property.

MinNumberOfWorkers int

The minimum number of worker nodes. Defaults to 1.

SparkConfig map[string]string

User-specified Spark configuration variables key-value pairs.

SparkEnvironmentVariables map[string]string

User-specified Spark environment variables key-value pairs.

clusterVersion String

Spark version of the cluster.

nodeType String

Node type for the new cluster.

customTags Map<String,String>

Tags for the cluster resource.

driverNodeType String

Driver node type for the cluster.

initScripts List<String>

User defined initialization scripts for the cluster.

logDestination String

Location to deliver Spark driver, worker, and event logs.

maxNumberOfWorkers Integer

The max number of worker nodes. Set this value if you want to enable autoscaling between the min_number_of_workers and this value. Omit this value to use a fixed number of workers defined in the min_number_of_workers property.

minNumberOfWorkers Integer

The minimum number of worker nodes. Defaults to 1.

sparkConfig Map<String,String>

User-specified Spark configuration variables key-value pairs.

sparkEnvironmentVariables Map<String,String>

User-specified Spark environment variables key-value pairs.

clusterVersion string

Spark version of the cluster.

nodeType string

Node type for the new cluster.

customTags {[key: string]: string}

Tags for the cluster resource.

driverNodeType string

Driver node type for the cluster.

initScripts string[]

User defined initialization scripts for the cluster.

logDestination string

Location to deliver Spark driver, worker, and event logs.

maxNumberOfWorkers number

The max number of worker nodes. Set this value if you want to enable autoscaling between the min_number_of_workers and this value. Omit this value to use a fixed number of workers defined in the min_number_of_workers property.

minNumberOfWorkers number

The minimum number of worker nodes. Defaults to 1.

sparkConfig {[key: string]: string}

User-specified Spark configuration variables key-value pairs.

sparkEnvironmentVariables {[key: string]: string}

User-specified Spark environment variables key-value pairs.

cluster_version str

Spark version of the cluster.

node_type str

Node type for the new cluster.

custom_tags Mapping[str, str]

Tags for the cluster resource.

driver_node_type str

Driver node type for the cluster.

init_scripts Sequence[str]

User defined initialization scripts for the cluster.

log_destination str

Location to deliver Spark driver, worker, and event logs.

max_number_of_workers int

The max number of worker nodes. Set this value if you want to enable autoscaling between the min_number_of_workers and this value. Omit this value to use a fixed number of workers defined in the min_number_of_workers property.

min_number_of_workers int

The minimum number of worker nodes. Defaults to 1.

spark_config Mapping[str, str]

User-specified Spark configuration variables key-value pairs.

spark_environment_variables Mapping[str, str]

User-specified Spark environment variables key-value pairs.

clusterVersion String

Spark version of the cluster.

nodeType String

Node type for the new cluster.

customTags Map<String>

Tags for the cluster resource.

driverNodeType String

Driver node type for the cluster.

initScripts List<String>

User defined initialization scripts for the cluster.

logDestination String

Location to deliver Spark driver, worker, and event logs.

maxNumberOfWorkers Number

The max number of worker nodes. Set this value if you want to enable autoscaling between the min_number_of_workers and this value. Omit this value to use a fixed number of workers defined in the min_number_of_workers property.

minNumberOfWorkers Number

The minimum number of worker nodes. Defaults to 1.

sparkConfig Map<String>

User-specified Spark configuration variables key-value pairs.

sparkEnvironmentVariables Map<String>

User-specified Spark environment variables key-value pairs.

Import

Data Factory Linked Services can be imported using the resource id, e.g.

 $ pulumi import azure:datafactory/linkedServiceAzureDatabricks:LinkedServiceAzureDatabricks example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/example/providers/Microsoft.DataFactory/factories/example/linkedservices/example

Package Details

Repository
https://github.com/pulumi/pulumi-azure
License
Apache-2.0
Notes

This Pulumi package is based on the azurerm Terraform Provider.