
databricks.ServicePrincipal

Databricks v1.34.0 published on Tuesday, Mar 5, 2024 by Pulumi

    Directly manages service principals that can be added to a databricks.Group in a Databricks workspace or account.

    Note To assign account-level service principals to a workspace, use databricks.MwsPermissionAssignment, as sketched below.
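
    A minimal sketch of such an assignment (TypeScript; the account provider configuration, workspace ID, and resource names are illustrative, not part of this page's examples):

    import * as pulumi from "@pulumi/pulumi";
    import * as databricks from "@pulumi/databricks";
    
    // Account-level provider (see "Creating service principal in AWS Databricks account" below).
    const account = new databricks.Provider("account", {
        host: "https://accounts.cloud.databricks.com",
        accountId: "00000000-0000-0000-0000-000000000000",
    });
    
    // Service principal created at the account level.
    const sp = new databricks.ServicePrincipal("sp", {
        displayName: "Automation-only SP",
    }, { provider: account });
    
    // Assign the account-level service principal to a specific workspace.
    const assignment = new databricks.MwsPermissionAssignment("sp-in-workspace", {
        workspaceId: 123456789012345, // illustrative workspace ID
        principalId: sp.id.apply(id => parseInt(id, 10)), // numeric SCIM ID of the service principal
        permissions: ["USER"],
    }, { provider: account });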

    Note Entitlements, such as allow_cluster_create, allow_instance_pool_create, databricks_sql_access, and workspace_access, are applicable only to workspace-level service principals. Use the databricks.Entitlements resource to assign entitlements inside a workspace to account-level service principals, as sketched below.
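
    A minimal sketch of assigning entitlements inside a workspace (TypeScript; the application ID is illustrative and the default, workspace-level provider is assumed):

    import * as pulumi from "@pulumi/pulumi";
    import * as databricks from "@pulumi/databricks";
    
    // Grant workspace entitlements to an existing account-level service principal,
    // identified by its application ID.
    const spEntitlements = new databricks.Entitlements("sp-entitlements", {
        servicePrincipalId: "00000000-0000-0000-0000-000000000000", // illustrative application ID
        allowClusterCreate: true,
        databricksSqlAccess: true,
        workspaceAccess: true,
    });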

    To create service principals in the Databricks account, the provider must be configured with host = "https://accounts.cloud.databricks.com" on AWS deployments, or with host = "https://accounts.azuredatabricks.net" and AAD token authentication on Azure deployments.

    The following resources are often used in the same context:

    • End to end workspace management guide.
    • databricks.Group to manage groups in Databricks Workspace or Account Console (for AWS deployments).
    • databricks.Group data to retrieve information about databricks.Group members, entitlements and instance profiles.
    • databricks.GroupMember to attach users and groups as group members.
    • databricks.Permissions to manage access control in Databricks workspace.
    • databricks.SqlPermissions to manage data object access control lists in Databricks workspaces for things like tables, views, databases, and more.
    • databricks.ServicePrincipalSecret to manage secrets for the service principal (only for AWS deployments).

    Example Usage

    Creating regular service principal

    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Databricks = Pulumi.Databricks;
    
    return await Deployment.RunAsync(() => 
    {
        var sp = new Databricks.ServicePrincipal("sp", new()
        {
            ApplicationId = "00000000-0000-0000-0000-000000000000",
        });
    
    });
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		_, err := databricks.NewServicePrincipal(ctx, "sp", &databricks.ServicePrincipalArgs{
    			ApplicationId: pulumi.String("00000000-0000-0000-0000-000000000000"),
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.databricks.ServicePrincipal;
    import com.pulumi.databricks.ServicePrincipalArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var sp = new ServicePrincipal("sp", ServicePrincipalArgs.builder()        
                .applicationId("00000000-0000-0000-0000-000000000000")
                .build());
    
        }
    }
    
    import pulumi
    import pulumi_databricks as databricks
    
    sp = databricks.ServicePrincipal("sp", application_id="00000000-0000-0000-0000-000000000000")
    
    import * as pulumi from "@pulumi/pulumi";
    import * as databricks from "@pulumi/databricks";
    
    const sp = new databricks.ServicePrincipal("sp", {applicationId: "00000000-0000-0000-0000-000000000000"});
    
    resources:
      sp:
        type: databricks:ServicePrincipal
        properties:
          applicationId: 00000000-0000-0000-0000-000000000000
    

    Creating service principal with administrative permissions - referencing the special admins databricks.Group in a databricks.GroupMember resource

    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Databricks = Pulumi.Databricks;
    
    return await Deployment.RunAsync(() => 
    {
        var admins = Databricks.GetGroup.Invoke(new()
        {
            DisplayName = "admins",
        });
    
        var sp = new Databricks.ServicePrincipal("sp", new()
        {
            ApplicationId = "00000000-0000-0000-0000-000000000000",
        });
    
        var i_am_admin = new Databricks.GroupMember("i-am-admin", new()
        {
            GroupId = admins.Apply(getGroupResult => getGroupResult.Id),
            MemberId = sp.Id,
        });
    
    });
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		admins, err := databricks.LookupGroup(ctx, &databricks.LookupGroupArgs{
    			DisplayName: "admins",
    		}, nil)
    		if err != nil {
    			return err
    		}
    		sp, err := databricks.NewServicePrincipal(ctx, "sp", &databricks.ServicePrincipalArgs{
    			ApplicationId: pulumi.String("00000000-0000-0000-0000-000000000000"),
    		})
    		if err != nil {
    			return err
    		}
    		_, err = databricks.NewGroupMember(ctx, "i-am-admin", &databricks.GroupMemberArgs{
    			GroupId:  pulumi.String(admins.Id),
    			MemberId: sp.ID(),
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.databricks.DatabricksFunctions;
    import com.pulumi.databricks.inputs.GetGroupArgs;
    import com.pulumi.databricks.ServicePrincipal;
    import com.pulumi.databricks.ServicePrincipalArgs;
    import com.pulumi.databricks.GroupMember;
    import com.pulumi.databricks.GroupMemberArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            final var admins = DatabricksFunctions.getGroup(GetGroupArgs.builder()
                .displayName("admins")
                .build());
    
            var sp = new ServicePrincipal("sp", ServicePrincipalArgs.builder()        
                .applicationId("00000000-0000-0000-0000-000000000000")
                .build());
    
            var i_am_admin = new GroupMember("i-am-admin", GroupMemberArgs.builder()        
                .groupId(admins.applyValue(getGroupResult -> getGroupResult.id()))
                .memberId(sp.id())
                .build());
    
        }
    }
    
    import pulumi
    import pulumi_databricks as databricks
    
    admins = databricks.get_group(display_name="admins")
    sp = databricks.ServicePrincipal("sp", application_id="00000000-0000-0000-0000-000000000000")
    i_am_admin = databricks.GroupMember("i-am-admin",
        group_id=admins.id,
        member_id=sp.id)
    
    import * as pulumi from "@pulumi/pulumi";
    import * as databricks from "@pulumi/databricks";
    
    const admins = databricks.getGroup({
        displayName: "admins",
    });
    const sp = new databricks.ServicePrincipal("sp", {applicationId: "00000000-0000-0000-0000-000000000000"});
    const i_am_admin = new databricks.GroupMember("i-am-admin", {
        groupId: admins.then(admins => admins.id),
        memberId: sp.id,
    });
    
    resources:
      sp:
        type: databricks:ServicePrincipal
        properties:
          applicationId: 00000000-0000-0000-0000-000000000000
      i-am-admin:
        type: databricks:GroupMember
        properties:
          groupId: ${admins.id}
          memberId: ${sp.id}
    variables:
      admins:
        fn::invoke:
          Function: databricks:getGroup
          Arguments:
            displayName: admins
    

    Creating service principal with cluster create permissions

    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Databricks = Pulumi.Databricks;
    
    return await Deployment.RunAsync(() => 
    {
        var sp = new Databricks.ServicePrincipal("sp", new()
        {
            AllowClusterCreate = true,
            ApplicationId = "00000000-0000-0000-0000-000000000000",
            DisplayName = "Example service principal",
        });
    
    });
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		_, err := databricks.NewServicePrincipal(ctx, "sp", &databricks.ServicePrincipalArgs{
    			AllowClusterCreate: pulumi.Bool(true),
    			ApplicationId:      pulumi.String("00000000-0000-0000-0000-000000000000"),
    			DisplayName:        pulumi.String("Example service principal"),
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.databricks.ServicePrincipal;
    import com.pulumi.databricks.ServicePrincipalArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var sp = new ServicePrincipal("sp", ServicePrincipalArgs.builder()        
                .allowClusterCreate(true)
                .applicationId("00000000-0000-0000-0000-000000000000")
                .displayName("Example service principal")
                .build());
    
        }
    }
    
    import pulumi
    import pulumi_databricks as databricks
    
    sp = databricks.ServicePrincipal("sp",
        allow_cluster_create=True,
        application_id="00000000-0000-0000-0000-000000000000",
        display_name="Example service principal")
    
    import * as pulumi from "@pulumi/pulumi";
    import * as databricks from "@pulumi/databricks";
    
    const sp = new databricks.ServicePrincipal("sp", {
        allowClusterCreate: true,
        applicationId: "00000000-0000-0000-0000-000000000000",
        displayName: "Example service principal",
    });
    
    resources:
      sp:
        type: databricks:ServicePrincipal
        properties:
          allowClusterCreate: true
          applicationId: 00000000-0000-0000-0000-000000000000
          displayName: Example service principal
    

    Creating service principal in AWS Databricks account

    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Databricks = Pulumi.Databricks;
    
    return await Deployment.RunAsync(() => 
    {
        var config = new Config();
        // initialize provider at account-level
        var mws = new Databricks.Provider("mws", new()
        {
            Host = "https://accounts.cloud.databricks.com",
            AccountId = "00000000-0000-0000-0000-000000000000",
            ClientId = config.Require("clientId"),
            ClientSecret = config.RequireSecret("clientSecret"),
        });
    
        var sp = new Databricks.ServicePrincipal("sp", new()
        {
            DisplayName = "Automation-only SP",
        }, new CustomResourceOptions
        {
            Provider = mws,
        });
    
    });
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi/config"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		cfg := config.New(ctx, "")
    		// initialize provider at account-level
    		mws, err := databricks.NewProvider(ctx, "mws", &databricks.ProviderArgs{
    			Host:         pulumi.String("https://accounts.cloud.databricks.com"),
    			AccountId:    pulumi.String("00000000-0000-0000-0000-000000000000"),
    			ClientId:     pulumi.String(cfg.Require("clientId")),
    			ClientSecret: cfg.RequireSecret("clientSecret"),
    		})
    		if err != nil {
    			return err
    		}
    		_, err = databricks.NewServicePrincipal(ctx, "sp", &databricks.ServicePrincipalArgs{
    			DisplayName: pulumi.String("Automation-only SP"),
    		}, pulumi.Provider(mws))
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.databricks.Provider;
    import com.pulumi.databricks.ProviderArgs;
    import com.pulumi.databricks.ServicePrincipal;
    import com.pulumi.databricks.ServicePrincipalArgs;
    import com.pulumi.resources.CustomResourceOptions;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var config = ctx.config();
            // initialize provider at account-level
            var mws = new Provider("mws", ProviderArgs.builder()
                .host("https://accounts.cloud.databricks.com")
                .accountId("00000000-0000-0000-0000-000000000000")
                .clientId(config.require("clientId"))
                .clientSecret(config.require("clientSecret"))
                .build());
    
            var sp = new ServicePrincipal("sp", ServicePrincipalArgs.builder()
                .displayName("Automation-only SP")
                .build(), CustomResourceOptions.builder()
                    .provider(mws)
                    .build());
    
        }
    }
    
    import pulumi
    import pulumi_databricks as databricks
    
    config = pulumi.Config()
    # initialize provider at account-level
    mws = databricks.Provider("mws",
        host="https://accounts.cloud.databricks.com",
        account_id="00000000-0000-0000-0000-000000000000",
        client_id=config.require("clientId"),
        client_secret=config.require_secret("clientSecret"))
    sp = databricks.ServicePrincipal("sp",
        display_name="Automation-only SP",
        opts=pulumi.ResourceOptions(provider=mws))
    
    import * as pulumi from "@pulumi/pulumi";
    import * as databricks from "@pulumi/databricks";
    
    const config = new pulumi.Config();
    // initialize provider at account-level
    const mws = new databricks.Provider("mws", {
        host: "https://accounts.cloud.databricks.com",
        accountId: "00000000-0000-0000-0000-000000000000",
        clientId: config.require("clientId"),
        clientSecret: config.requireSecret("clientSecret"),
    });
    const sp = new databricks.ServicePrincipal("sp", {displayName: "Automation-only SP"}, {
        provider: mws,
    });
    
    configuration:
      clientId:
        type: String
      clientSecret:
        type: String
    resources:
      # initialize provider at account-level
      mws:
        type: pulumi:providers:databricks
        properties:
          host: https://accounts.cloud.databricks.com
          accountId: 00000000-0000-0000-0000-000000000000
          clientId: ${clientId}
          clientSecret: ${clientSecret}
      sp:
        type: databricks:ServicePrincipal
        properties:
          displayName: Automation-only SP
        options:
          provider: ${mws}
    

    Creating service principal in Azure Databricks account

    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Databricks = Pulumi.Databricks;
    
    return await Deployment.RunAsync(() => 
    {
        // initialize provider at Azure account-level
        var azureAccount = new Databricks.Provider("azureAccount", new()
        {
            Host = "https://accounts.azuredatabricks.net",
            AccountId = "00000000-0000-0000-0000-000000000000",
            AuthType = "azure-cli",
        });
    
        var sp = new Databricks.ServicePrincipal("sp", new()
        {
            ApplicationId = "00000000-0000-0000-0000-000000000000",
        }, new CustomResourceOptions
        {
            Provider = azureAccount,
        });
    
    });
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		// initialize provider at Azure account-level
    		_, err := databricks.NewProvider(ctx, "azureAccount", &databricks.ProviderArgs{
    			Host:      pulumi.String("https://accounts.azuredatabricks.net"),
    			AccountId: pulumi.String("00000000-0000-0000-0000-000000000000"),
    			AuthType:  pulumi.String("azure-cli"),
    		})
    		if err != nil {
    			return err
    		}
    		_, err = databricks.NewServicePrincipal(ctx, "sp", &databricks.ServicePrincipalArgs{
    			ApplicationId: pulumi.String("00000000-0000-0000-0000-000000000000"),
    		}, pulumi.Provider(azureAccount))
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.databricks.Provider;
    import com.pulumi.databricks.ProviderArgs;
    import com.pulumi.databricks.ServicePrincipal;
    import com.pulumi.databricks.ServicePrincipalArgs;
    import com.pulumi.resources.CustomResourceOptions;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var azureAccount = new Provider("azureAccount", ProviderArgs.builder()        
                .host("https://accounts.azuredatabricks.net")
                .accountId("00000000-0000-0000-0000-000000000000")
                .authType("azure-cli")
                .build());
    
            var sp = new ServicePrincipal("sp", ServicePrincipalArgs.builder()        
                .applicationId("00000000-0000-0000-0000-000000000000")
                .build(), CustomResourceOptions.builder()
                    .provider(azureAccount)
                    .build());
    
        }
    }
    
    import pulumi
    import pulumi_databricks as databricks
    
    # initialize provider at Azure account-level
    azure_account = databricks.Provider("azureAccount",
        host="https://accounts.azuredatabricks.net",
        account_id="00000000-0000-0000-0000-000000000000",
        auth_type="azure-cli")
    sp = databricks.ServicePrincipal("sp", application_id="00000000-0000-0000-0000-000000000000",
    opts=pulumi.ResourceOptions(provider=databricks["azure_account"]))
    
    import * as pulumi from "@pulumi/pulumi";
    import * as databricks from "@pulumi/databricks";
    
    // initialize provider at Azure account-level
    const azureAccount = new databricks.Provider("azureAccount", {
        host: "https://accounts.azuredatabricks.net",
        accountId: "00000000-0000-0000-0000-000000000000",
        authType: "azure-cli",
    });
    const sp = new databricks.ServicePrincipal("sp", {applicationId: "00000000-0000-0000-0000-000000000000"}, {
        provider: azureAccount,
    });
    
    resources:
      # initialize provider at Azure account-level
      azureAccount:
        type: pulumi:providers:databricks
        properties:
          host: https://accounts.azuredatabricks.net
          accountId: 00000000-0000-0000-0000-000000000000
          authType: azure-cli
      sp:
        type: databricks:ServicePrincipal
        properties:
          applicationId: 00000000-0000-0000-0000-000000000000
        options:
          provider: ${azureAccount}
    

    Create ServicePrincipal Resource

    new ServicePrincipal(name: string, args?: ServicePrincipalArgs, opts?: CustomResourceOptions);
    @overload
    def ServicePrincipal(resource_name: str,
                         opts: Optional[ResourceOptions] = None,
                         acl_principal_id: Optional[str] = None,
                         active: Optional[bool] = None,
                         allow_cluster_create: Optional[bool] = None,
                         allow_instance_pool_create: Optional[bool] = None,
                         application_id: Optional[str] = None,
                         databricks_sql_access: Optional[bool] = None,
                         disable_as_user_deletion: Optional[bool] = None,
                         display_name: Optional[str] = None,
                         external_id: Optional[str] = None,
                         force: Optional[bool] = None,
                         force_delete_home_dir: Optional[bool] = None,
                         force_delete_repos: Optional[bool] = None,
                         home: Optional[str] = None,
                         repos: Optional[str] = None,
                         workspace_access: Optional[bool] = None)
    @overload
    def ServicePrincipal(resource_name: str,
                         args: Optional[ServicePrincipalArgs] = None,
                         opts: Optional[ResourceOptions] = None)
    func NewServicePrincipal(ctx *Context, name string, args *ServicePrincipalArgs, opts ...ResourceOption) (*ServicePrincipal, error)
    public ServicePrincipal(string name, ServicePrincipalArgs? args = null, CustomResourceOptions? opts = null)
    public ServicePrincipal(String name, ServicePrincipalArgs args)
    public ServicePrincipal(String name, ServicePrincipalArgs args, CustomResourceOptions options)
    
    type: databricks:ServicePrincipal
    properties: # The arguments to resource properties.
    options: # Bag of options to control resource's behavior.
    
    
    name string (resource_name in Python)
    The unique name of the resource.
    args ServicePrincipalArgs
    The arguments to resource properties.
    opts CustomResourceOptions (ResourceOptions in Python, ResourceOption in Go)
    Bag of options to control resource's behavior.
    ctx Context (Go only)
    Context object for the current deployment.

    ServicePrincipal Resource Properties

    To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

    Inputs

    The ServicePrincipal resource accepts the following input properties:

    Property names follow each language's casing convention (for example allow_cluster_create in Python, AllowClusterCreate in C# and Go); the camelCase names below are used in TypeScript, Java, and YAML.

    aclPrincipalId string
    Identifier for use in databricks.AccessControlRuleSet, e.g. servicePrincipals/00000000-0000-0000-0000-000000000000.
    active bool
    Whether the service principal is active. True by default, but can be set to false to deactivate the service principal while preserving its assets.
    allowClusterCreate bool
    Allow the service principal to have cluster create privileges. Defaults to false. More fine-grained permissions can be assigned with databricks.Permissions and the cluster_id argument. Note that anyone without allow_cluster_create set, but with permission to use a cluster policy, can still create clusters, but only within the boundaries of that specific policy.
    allowInstancePoolCreate bool
    Allow the service principal to have instance pool create privileges. Defaults to false. More fine-grained permissions can be assigned with databricks.Permissions and the instance_pool_id argument.
    applicationId string
    On Azure, this is the application ID of the given Azure service principal and is its form of access and identity; on other clouds this value is auto-generated.
    databricksSqlAccess bool
    Whether the service principal has access to the Databricks SQL feature through a databricks.SqlEndpoint.
    disableAsUserDeletion bool
    When deleting the service principal, set its active flag to false instead of actually deleting it. This flag is exclusive with the force_delete_repos and force_delete_home_dir flags. True by default for the account SCIM API, false otherwise.
    displayName string
    An alias for the service principal; it can be the full name of the service principal.
    externalId string
    ID of the service principal in an external identity provider.
    force bool
    forceDeleteHomeDir bool
    Whether the service principal's home directory is deleted when the service principal is deleted. Has no effect for the account SCIM API. False by default.
    forceDeleteRepos bool
    Whether the service principal's repo directory is deleted when the service principal is deleted. Has no effect for the account SCIM API. False by default.
    home string
    Home folder of the service principal, e.g. /Users/00000000-0000-0000-0000-000000000000.
    repos string
    Personal Repos location of the service principal, e.g. /Repos/00000000-0000-0000-0000-000000000000.
    workspaceAccess bool
    Whether the service principal has access to the Databricks workspace.
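
    To show where aclPrincipalId is consumed, here is a minimal, hedged sketch granting a role through an account-level rule set (TypeScript; the rule-set name and role are illustrative):

    import * as pulumi from "@pulumi/pulumi";
    import * as databricks from "@pulumi/databricks";
    
    const sp = new databricks.ServicePrincipal("sp", {
        displayName: "Automation-only SP",
    });
    
    // Reference the service principal's aclPrincipalId inside an access control rule set.
    const rules = new databricks.AccessControlRuleSet("automation-rules", {
        name: "accounts/00000000-0000-0000-0000-000000000000/ruleSets/default", // illustrative rule-set name
        grantRules: [{
            principals: [sp.aclPrincipalId],
            role: "roles/servicePrincipal.user", // illustrative role
        }],
    });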

    Outputs

    All input properties are implicitly available as output properties. Additionally, the ServicePrincipal resource produces the following output properties:

    id string
    The provider-assigned unique ID for this managed resource.

    Look up Existing ServicePrincipal Resource

    Get an existing ServicePrincipal resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

    public static get(name: string, id: Input<ID>, state?: ServicePrincipalState, opts?: CustomResourceOptions): ServicePrincipal
    @staticmethod
    def get(resource_name: str,
            id: str,
            opts: Optional[ResourceOptions] = None,
            acl_principal_id: Optional[str] = None,
            active: Optional[bool] = None,
            allow_cluster_create: Optional[bool] = None,
            allow_instance_pool_create: Optional[bool] = None,
            application_id: Optional[str] = None,
            databricks_sql_access: Optional[bool] = None,
            disable_as_user_deletion: Optional[bool] = None,
            display_name: Optional[str] = None,
            external_id: Optional[str] = None,
            force: Optional[bool] = None,
            force_delete_home_dir: Optional[bool] = None,
            force_delete_repos: Optional[bool] = None,
            home: Optional[str] = None,
            repos: Optional[str] = None,
            workspace_access: Optional[bool] = None) -> ServicePrincipal
    func GetServicePrincipal(ctx *Context, name string, id IDInput, state *ServicePrincipalState, opts ...ResourceOption) (*ServicePrincipal, error)
    public static ServicePrincipal Get(string name, Input<string> id, ServicePrincipalState? state, CustomResourceOptions? opts = null)
    public static ServicePrincipal get(String name, Output<String> id, ServicePrincipalState state, CustomResourceOptions options)
    Resource lookup is not supported in YAML
    name (resource_name in Python)
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to look up.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
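
    For instance, a minimal sketch of adopting an existing service principal by its SCIM ID (TypeScript; the ID shown is illustrative):

    import * as pulumi from "@pulumi/pulumi";
    import * as databricks from "@pulumi/databricks";
    
    // Looks up the existing service principal's state by logical name and provider-assigned ID;
    // no new service principal is created.
    const existing = databricks.ServicePrincipal.get("existing-sp", "2345678901234567");
    
    export const applicationId = existing.applicationId;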
    The following state arguments are supported. They are identical to the input properties documented above, from aclPrincipalId through workspaceAccess.

    Import

    The service principal resource can be imported using its SCIM ID, for example 2345678901234567. To get the service principal ID, call the Get service principals API.

    bash

    $ pulumi import databricks:index/servicePrincipal:ServicePrincipal me <service-principal-id>
    

    Package Details

    Repository
    databricks pulumi/pulumi-databricks
    License
    Apache-2.0
    Notes
    This Pulumi package is based on the databricks Terraform Provider.