1. Packages
  2. Databricks
  3. API Docs
  4. User
Databricks v1.33.1 published on Friday, Feb 23, 2024 by Pulumi

databricks.User

Explore with Pulumi AI

databricks logo
Databricks v1.33.1 published on Friday, Feb 23, 2024 by Pulumi

    This resource allows you to manage users in Databricks Workspace, Databricks Account Console or Azure Databricks Account Console. You can also associate Databricks users to databricks_group. Upon user creation the user will receive a password reset email. You can also get information about caller identity using databricks.getCurrentUser data source.

    Note To assign account level users to workspace use databricks_mws_permission_assignment.

    Note Entitlements, such as allow_cluster_create, allow_instance_pool_create, databricks_sql_access, and workspace_access, are applicable only to workspace-level users. Use databricks.Entitlements resource to assign entitlements inside a workspace to account-level users.

    To create users in the Databricks account, the provider must be configured with host = "https://accounts.cloud.databricks.com" on AWS deployments, or with host = "https://accounts.azuredatabricks.net" and AAD token authentication on Azure deployments.

    The following resources are often used in the same context:

    • End to end workspace management guide.
    • databricks.Group to manage groups in Databricks Workspace or Account Console (for AWS deployments).
    • databricks.Group data to retrieve information about databricks.Group members, entitlements and instance profiles.
    • databricks.GroupInstanceProfile to attach databricks.InstanceProfile (AWS) to databricks_group.
    • databricks.GroupMember to attach users and groups as group members.
    • databricks.InstanceProfile to manage AWS EC2 instance profiles that users can launch databricks.Cluster and access data, like databricks_mount.
    • databricks.User data to retrieve information about databricks_user.

    Example Usage

    Creating regular user

    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Databricks = Pulumi.Databricks;
    
    return await Deployment.RunAsync(() => 
    {
        var me = new Databricks.User("me", new()
        {
            UserName = "me@example.com",
        });
    
    });
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		_, err := databricks.NewUser(ctx, "me", &databricks.UserArgs{
    			UserName: pulumi.String("me@example.com"),
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.databricks.User;
    import com.pulumi.databricks.UserArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var me = new User("me", UserArgs.builder()        
                .userName("me@example.com")
                .build());
    
        }
    }
    
    import pulumi
    import pulumi_databricks as databricks
    
    me = databricks.User("me", user_name="me@example.com")
    
    import * as pulumi from "@pulumi/pulumi";
    import * as databricks from "@pulumi/databricks";
    
    const me = new databricks.User("me", {userName: "me@example.com"});
    
    resources:
      me:
        type: databricks:User
        properties:
          userName: me@example.com
    

    databricks.Group in databricks.GroupMember resource

    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Databricks = Pulumi.Databricks;
    
    return await Deployment.RunAsync(() => 
    {
        var admins = Databricks.GetGroup.Invoke(new()
        {
            DisplayName = "admins",
        });
    
        var me = new Databricks.User("me", new()
        {
            UserName = "me@example.com",
        });
    
        var i_am_admin = new Databricks.GroupMember("i-am-admin", new()
        {
            GroupId = admins.Apply(getGroupResult => getGroupResult.Id),
            MemberId = me.Id,
        });
    
    });
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		admins, err := databricks.LookupGroup(ctx, &databricks.LookupGroupArgs{
    			DisplayName: "admins",
    		}, nil)
    		if err != nil {
    			return err
    		}
    		me, err := databricks.NewUser(ctx, "me", &databricks.UserArgs{
    			UserName: pulumi.String("me@example.com"),
    		})
    		if err != nil {
    			return err
    		}
    		_, err = databricks.NewGroupMember(ctx, "i-am-admin", &databricks.GroupMemberArgs{
    			GroupId:  *pulumi.String(admins.Id),
    			MemberId: me.ID(),
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.databricks.DatabricksFunctions;
    import com.pulumi.databricks.inputs.GetGroupArgs;
    import com.pulumi.databricks.User;
    import com.pulumi.databricks.UserArgs;
    import com.pulumi.databricks.GroupMember;
    import com.pulumi.databricks.GroupMemberArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            final var admins = DatabricksFunctions.getGroup(GetGroupArgs.builder()
                .displayName("admins")
                .build());
    
            var me = new User("me", UserArgs.builder()        
                .userName("me@example.com")
                .build());
    
            var i_am_admin = new GroupMember("i-am-admin", GroupMemberArgs.builder()        
                .groupId(admins.applyValue(getGroupResult -> getGroupResult.id()))
                .memberId(me.id())
                .build());
    
        }
    }
    
    import pulumi
    import pulumi_databricks as databricks
    
    admins = databricks.get_group(display_name="admins")
    me = databricks.User("me", user_name="me@example.com")
    i_am_admin = databricks.GroupMember("i-am-admin",
        group_id=admins.id,
        member_id=me.id)
    
    import * as pulumi from "@pulumi/pulumi";
    import * as databricks from "@pulumi/databricks";
    
    const admins = databricks.getGroup({
        displayName: "admins",
    });
    const me = new databricks.User("me", {userName: "me@example.com"});
    const i_am_admin = new databricks.GroupMember("i-am-admin", {
        groupId: admins.then(admins => admins.id),
        memberId: me.id,
    });
    
    resources:
      me:
        type: databricks:User
        properties:
          userName: me@example.com
      i-am-admin:
        type: databricks:GroupMember
        properties:
          groupId: ${admins.id}
          memberId: ${me.id}
    variables:
      admins:
        fn::invoke:
          Function: databricks:getGroup
          Arguments:
            displayName: admins
    

    Creating user with cluster create permissions

    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Databricks = Pulumi.Databricks;
    
    return await Deployment.RunAsync(() => 
    {
        var me = new Databricks.User("me", new()
        {
            AllowClusterCreate = true,
            DisplayName = "Example user",
            UserName = "me@example.com",
        });
    
    });
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		_, err := databricks.NewUser(ctx, "me", &databricks.UserArgs{
    			AllowClusterCreate: pulumi.Bool(true),
    			DisplayName:        pulumi.String("Example user"),
    			UserName:           pulumi.String("me@example.com"),
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.databricks.User;
    import com.pulumi.databricks.UserArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var me = new User("me", UserArgs.builder()        
                .allowClusterCreate(true)
                .displayName("Example user")
                .userName("me@example.com")
                .build());
    
        }
    }
    
    import pulumi
    import pulumi_databricks as databricks
    
    me = databricks.User("me",
        allow_cluster_create=True,
        display_name="Example user",
        user_name="me@example.com")
    
    import * as pulumi from "@pulumi/pulumi";
    import * as databricks from "@pulumi/databricks";
    
    const me = new databricks.User("me", {
        allowClusterCreate: true,
        displayName: "Example user",
        userName: "me@example.com",
    });
    
    resources:
      me:
        type: databricks:User
        properties:
          allowClusterCreate: true
          displayName: Example user
          userName: me@example.com
    

    Creating user in AWS Databricks account

    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Databricks = Pulumi.Databricks;
    
    return await Deployment.RunAsync(() => 
    {
        // initialize provider at account-level
        var mws = new Databricks.Provider("mws", new()
        {
            Host = "https://accounts.cloud.databricks.com",
            AccountId = "00000000-0000-0000-0000-000000000000",
            ClientId = @var.Client_id,
            ClientSecret = @var.Client_secret,
        });
    
        var accountUser = new Databricks.User("accountUser", new()
        {
            UserName = "me@example.com",
            DisplayName = "Example user",
        }, new CustomResourceOptions
        {
            Provider = databricks.Mws,
        });
    
    });
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		_, err := databricks.NewProvider(ctx, "mws", &databricks.ProviderArgs{
    			Host:         pulumi.String("https://accounts.cloud.databricks.com"),
    			AccountId:    pulumi.String("00000000-0000-0000-0000-000000000000"),
    			ClientId:     pulumi.Any(_var.Client_id),
    			ClientSecret: pulumi.Any(_var.Client_secret),
    		})
    		if err != nil {
    			return err
    		}
    		_, err = databricks.NewUser(ctx, "accountUser", &databricks.UserArgs{
    			UserName:    pulumi.String("me@example.com"),
    			DisplayName: pulumi.String("Example user"),
    		}, pulumi.Provider(databricks.Mws))
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.databricks.Provider;
    import com.pulumi.databricks.ProviderArgs;
    import com.pulumi.databricks.User;
    import com.pulumi.databricks.UserArgs;
    import com.pulumi.resources.CustomResourceOptions;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var mws = new Provider("mws", ProviderArgs.builder()        
                .host("https://accounts.cloud.databricks.com")
                .accountId("00000000-0000-0000-0000-000000000000")
                .clientId(var_.client_id())
                .clientSecret(var_.client_secret())
                .build());
    
            var accountUser = new User("accountUser", UserArgs.builder()        
                .userName("me@example.com")
                .displayName("Example user")
                .build(), CustomResourceOptions.builder()
                    .provider(databricks.mws())
                    .build());
    
        }
    }
    
    import pulumi
    import pulumi_databricks as databricks
    
    # initialize provider at account-level
    mws = databricks.Provider("mws",
        host="https://accounts.cloud.databricks.com",
        account_id="00000000-0000-0000-0000-000000000000",
        client_id=var["client_id"],
        client_secret=var["client_secret"])
    account_user = databricks.User("accountUser",
        user_name="me@example.com",
        display_name="Example user",
        opts=pulumi.ResourceOptions(provider=databricks["mws"]))
    
    import * as pulumi from "@pulumi/pulumi";
    import * as databricks from "@pulumi/databricks";
    
    // initialize provider at account-level
    const mws = new databricks.Provider("mws", {
        host: "https://accounts.cloud.databricks.com",
        accountId: "00000000-0000-0000-0000-000000000000",
        clientId: _var.client_id,
        clientSecret: _var.client_secret,
    });
    const accountUser = new databricks.User("accountUser", {
        userName: "me@example.com",
        displayName: "Example user",
    }, {
        provider: databricks.mws,
    });
    
    resources:
      # initialize provider at account-level
      mws:
        type: pulumi:providers:databricks
        properties:
          host: https://accounts.cloud.databricks.com
          accountId: 00000000-0000-0000-0000-000000000000
          clientId: ${var.client_id}
          clientSecret: ${var.client_secret}
      accountUser:
        type: databricks:User
        properties:
          userName: me@example.com
          displayName: Example user
        options:
          provider: ${databricks.mws}
    

    Creating user in Azure Databricks account

    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Databricks = Pulumi.Databricks;
    
    return await Deployment.RunAsync(() => 
    {
        // initialize provider at Azure account-level
        var azureAccount = new Databricks.Provider("azureAccount", new()
        {
            Host = "https://accounts.azuredatabricks.net",
            AccountId = "00000000-0000-0000-0000-000000000000",
            AuthType = "azure-cli",
        });
    
        var accountUser = new Databricks.User("accountUser", new()
        {
            UserName = "me@example.com",
            DisplayName = "Example user",
        }, new CustomResourceOptions
        {
            Provider = databricks.Azure_account,
        });
    
    });
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		_, err := databricks.NewProvider(ctx, "azureAccount", &databricks.ProviderArgs{
    			Host:      pulumi.String("https://accounts.azuredatabricks.net"),
    			AccountId: pulumi.String("00000000-0000-0000-0000-000000000000"),
    			AuthType:  pulumi.String("azure-cli"),
    		})
    		if err != nil {
    			return err
    		}
    		_, err = databricks.NewUser(ctx, "accountUser", &databricks.UserArgs{
    			UserName:    pulumi.String("me@example.com"),
    			DisplayName: pulumi.String("Example user"),
    		}, pulumi.Provider(databricks.Azure_account))
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.databricks.Provider;
    import com.pulumi.databricks.ProviderArgs;
    import com.pulumi.databricks.User;
    import com.pulumi.databricks.UserArgs;
    import com.pulumi.resources.CustomResourceOptions;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var azureAccount = new Provider("azureAccount", ProviderArgs.builder()        
                .host("https://accounts.azuredatabricks.net")
                .accountId("00000000-0000-0000-0000-000000000000")
                .authType("azure-cli")
                .build());
    
            var accountUser = new User("accountUser", UserArgs.builder()        
                .userName("me@example.com")
                .displayName("Example user")
                .build(), CustomResourceOptions.builder()
                    .provider(databricks.azure_account())
                    .build());
    
        }
    }
    
    import pulumi
    import pulumi_databricks as databricks
    
    # initialize provider at Azure account-level
    azure_account = databricks.Provider("azureAccount",
        host="https://accounts.azuredatabricks.net",
        account_id="00000000-0000-0000-0000-000000000000",
        auth_type="azure-cli")
    account_user = databricks.User("accountUser",
        user_name="me@example.com",
        display_name="Example user",
        opts=pulumi.ResourceOptions(provider=databricks["azure_account"]))
    
    import * as pulumi from "@pulumi/pulumi";
    import * as databricks from "@pulumi/databricks";
    
    // initialize provider at Azure account-level
    const azureAccount = new databricks.Provider("azureAccount", {
        host: "https://accounts.azuredatabricks.net",
        accountId: "00000000-0000-0000-0000-000000000000",
        authType: "azure-cli",
    });
    const accountUser = new databricks.User("accountUser", {
        userName: "me@example.com",
        displayName: "Example user",
    }, {
        provider: databricks.azure_account,
    });
    
    resources:
      # initialize provider at Azure account-level
      azureAccount:
        type: pulumi:providers:databricks
        properties:
          host: https://accounts.azuredatabricks.net
          accountId: 00000000-0000-0000-0000-000000000000
          authType: azure-cli
      accountUser:
        type: databricks:User
        properties:
          userName: me@example.com
          displayName: Example user
        options:
          provider: ${databricks.azure_account}
    

    Create User Resource

    new User(name: string, args: UserArgs, opts?: CustomResourceOptions);
    @overload
    def User(resource_name: str,
             opts: Optional[ResourceOptions] = None,
             acl_principal_id: Optional[str] = None,
             active: Optional[bool] = None,
             allow_cluster_create: Optional[bool] = None,
             allow_instance_pool_create: Optional[bool] = None,
             databricks_sql_access: Optional[bool] = None,
             disable_as_user_deletion: Optional[bool] = None,
             display_name: Optional[str] = None,
             external_id: Optional[str] = None,
             force: Optional[bool] = None,
             force_delete_home_dir: Optional[bool] = None,
             force_delete_repos: Optional[bool] = None,
             home: Optional[str] = None,
             repos: Optional[str] = None,
             user_name: Optional[str] = None,
             workspace_access: Optional[bool] = None)
    @overload
    def User(resource_name: str,
             args: UserArgs,
             opts: Optional[ResourceOptions] = None)
    func NewUser(ctx *Context, name string, args UserArgs, opts ...ResourceOption) (*User, error)
    public User(string name, UserArgs args, CustomResourceOptions? opts = null)
    public User(String name, UserArgs args)
    public User(String name, UserArgs args, CustomResourceOptions options)
    
    type: databricks:User
    properties: # The arguments to resource properties.
    options: # Bag of options to control resource's behavior.
    
    
    name string
    The unique name of the resource.
    args UserArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    resource_name str
    The unique name of the resource.
    args UserArgs
    The arguments to resource properties.
    opts ResourceOptions
    Bag of options to control resource's behavior.
    ctx Context
    Context object for the current deployment.
    name string
    The unique name of the resource.
    args UserArgs
    The arguments to resource properties.
    opts ResourceOption
    Bag of options to control resource's behavior.
    name string
    The unique name of the resource.
    args UserArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    name String
    The unique name of the resource.
    args UserArgs
    The arguments to resource properties.
    options CustomResourceOptions
    Bag of options to control resource's behavior.

    User Resource Properties

    To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

    Inputs

    The User resource accepts the following input properties:

    UserName string
    This is the username of the given user and will be their form of access and identity. Provided username will be converted to lower case if it contains upper case characters.
    AclPrincipalId string
    identifier for use in databricks_access_control_rule_set, e.g. users/mr.foo@example.com.
    Active bool
    Either user is active or not. True by default, but can be set to false in case of user deactivation with preserving user assets.
    AllowClusterCreate bool
    Allow the user to have cluster create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and cluster_id argument. Everyone without allow_cluster_create argument set, but with permission to use Cluster Policy would be able to create clusters, but within boundaries of that specific policy.
    AllowInstancePoolCreate bool
    Allow the user to have instance pool create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and instance_pool_id argument.
    DatabricksSqlAccess bool
    This is a field to allow the user to have access to the Databricks SQL feature in the User Interface and through databricks_sql_endpoint.
    DisableAsUserDeletion bool
    When deleting a user, set the user's active flag to false instead of actually deleting the user. This flag is exclusive to force_delete_repos and force_delete_home_dir flags. True by default for accounts SCIM API, false otherwise.
    DisplayName string
    This is an alias for the username that can be the full name of the user.
    ExternalId string
    ID of the user in an external identity provider.
    Force bool
    ForceDeleteHomeDir bool
    This flag determines whether the user's home directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
    ForceDeleteRepos bool
    This flag determines whether the user's repo directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
    Home string
    Home folder of the user, e.g. /Users/mr.foo@example.com.
    Repos string
    Personal Repos location of the user, e.g. /Repos/mr.foo@example.com.
    WorkspaceAccess bool
    UserName string
    This is the username of the given user and will be their form of access and identity. Provided username will be converted to lower case if it contains upper case characters.
    AclPrincipalId string
    identifier for use in databricks_access_control_rule_set, e.g. users/mr.foo@example.com.
    Active bool
    Either user is active or not. True by default, but can be set to false in case of user deactivation with preserving user assets.
    AllowClusterCreate bool
    Allow the user to have cluster create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and cluster_id argument. Everyone without allow_cluster_create argument set, but with permission to use Cluster Policy would be able to create clusters, but within boundaries of that specific policy.
    AllowInstancePoolCreate bool
    Allow the user to have instance pool create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and instance_pool_id argument.
    DatabricksSqlAccess bool
    This is a field to allow the user to have access to the Databricks SQL feature in the User Interface and through databricks_sql_endpoint.
    DisableAsUserDeletion bool
    When deleting a user, set the user's active flag to false instead of actually deleting the user. This flag is exclusive to force_delete_repos and force_delete_home_dir flags. True by default for accounts SCIM API, false otherwise.
    DisplayName string
    This is an alias for the username that can be the full name of the user.
    ExternalId string
    ID of the user in an external identity provider.
    Force bool
    ForceDeleteHomeDir bool
    This flag determines whether the user's home directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
    ForceDeleteRepos bool
    This flag determines whether the user's repo directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
    Home string
    Home folder of the user, e.g. /Users/mr.foo@example.com.
    Repos string
    Personal Repos location of the user, e.g. /Repos/mr.foo@example.com.
    WorkspaceAccess bool
    userName String
    This is the username of the given user and will be their form of access and identity. Provided username will be converted to lower case if it contains upper case characters.
    aclPrincipalId String
    identifier for use in databricks_access_control_rule_set, e.g. users/mr.foo@example.com.
    active Boolean
    Either user is active or not. True by default, but can be set to false in case of user deactivation with preserving user assets.
    allowClusterCreate Boolean
    Allow the user to have cluster create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and cluster_id argument. Everyone without allow_cluster_create argument set, but with permission to use Cluster Policy would be able to create clusters, but within boundaries of that specific policy.
    allowInstancePoolCreate Boolean
    Allow the user to have instance pool create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and instance_pool_id argument.
    databricksSqlAccess Boolean
    This is a field to allow the user to have access to the Databricks SQL feature in the User Interface and through databricks_sql_endpoint.
    disableAsUserDeletion Boolean
    When deleting a user, set the user's active flag to false instead of actually deleting the user. This flag is exclusive to force_delete_repos and force_delete_home_dir flags. True by default for accounts SCIM API, false otherwise.
    displayName String
    This is an alias for the username that can be the full name of the user.
    externalId String
    ID of the user in an external identity provider.
    force Boolean
    forceDeleteHomeDir Boolean
    This flag determines whether the user's home directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
    forceDeleteRepos Boolean
    This flag determines whether the user's repo directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
    home String
    Home folder of the user, e.g. /Users/mr.foo@example.com.
    repos String
    Personal Repos location of the user, e.g. /Repos/mr.foo@example.com.
    workspaceAccess Boolean
    userName string
    This is the username of the given user and will be their form of access and identity. Provided username will be converted to lower case if it contains upper case characters.
    aclPrincipalId string
    identifier for use in databricks_access_control_rule_set, e.g. users/mr.foo@example.com.
    active boolean
    Either user is active or not. True by default, but can be set to false in case of user deactivation with preserving user assets.
    allowClusterCreate boolean
    Allow the user to have cluster create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and cluster_id argument. Everyone without allow_cluster_create argument set, but with permission to use Cluster Policy would be able to create clusters, but within boundaries of that specific policy.
    allowInstancePoolCreate boolean
    Allow the user to have instance pool create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and instance_pool_id argument.
    databricksSqlAccess boolean
    This is a field to allow the user to have access to the Databricks SQL feature in the User Interface and through databricks_sql_endpoint.
    disableAsUserDeletion boolean
    When deleting a user, set the user's active flag to false instead of actually deleting the user. This flag is exclusive to force_delete_repos and force_delete_home_dir flags. True by default for accounts SCIM API, false otherwise.
    displayName string
    This is an alias for the username that can be the full name of the user.
    externalId string
    ID of the user in an external identity provider.
    force boolean
    forceDeleteHomeDir boolean
    This flag determines whether the user's home directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
    forceDeleteRepos boolean
    This flag determines whether the user's repo directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
    home string
    Home folder of the user, e.g. /Users/mr.foo@example.com.
    repos string
    Personal Repos location of the user, e.g. /Repos/mr.foo@example.com.
    workspaceAccess boolean
    user_name str
    This is the username of the given user and will be their form of access and identity. Provided username will be converted to lower case if it contains upper case characters.
    acl_principal_id str
    identifier for use in databricks_access_control_rule_set, e.g. users/mr.foo@example.com.
    active bool
    Either user is active or not. True by default, but can be set to false in case of user deactivation with preserving user assets.
    allow_cluster_create bool
    Allow the user to have cluster create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and cluster_id argument. Everyone without allow_cluster_create argument set, but with permission to use Cluster Policy would be able to create clusters, but within boundaries of that specific policy.
    allow_instance_pool_create bool
    Allow the user to have instance pool create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and instance_pool_id argument.
    databricks_sql_access bool
    This is a field to allow the user to have access to Databricks SQL feature in User Interface and through databricks_sql_endpoint.
    disable_as_user_deletion bool
    When deleting a user, set the user's active flag to false instead of actually deleting the user. This flag is mutually exclusive with the force_delete_repos and force_delete_home_dir flags. True by default for accounts SCIM API, false otherwise.
    display_name str
    This is an alias for the username that can be the full name of the user.
    external_id str
    ID of the user in an external identity provider.
    force bool
    force_delete_home_dir bool
    This flag determines whether the user's home directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
    force_delete_repos bool
    This flag determines whether the user's repo directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
    home str
    Home folder of the user, e.g. /Users/mr.foo@example.com.
    repos str
    Personal Repos location of the user, e.g. /Repos/mr.foo@example.com.
    workspace_access bool
    userName String
    This is the username of the given user and will be their form of access and identity. Provided username will be converted to lower case if it contains upper case characters.
    aclPrincipalId String
    identifier for use in databricks_access_control_rule_set, e.g. users/mr.foo@example.com.
    active Boolean
    Either user is active or not. True by default, but can be set to false in case of user deactivation with preserving user assets.
    allowClusterCreate Boolean
    Allow the user to have cluster create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and cluster_id argument. Everyone without allow_cluster_create argument set, but with permission to use Cluster Policy would be able to create clusters, but within boundaries of that specific policy.
    allowInstancePoolCreate Boolean
    Allow the user to have instance pool create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and instance_pool_id argument.
    databricksSqlAccess Boolean
    This is a field to allow the user to have access to Databricks SQL feature in User Interface and through databricks_sql_endpoint.
    disableAsUserDeletion Boolean
    When deleting a user, set the user's active flag to false instead of actually deleting the user. This flag is mutually exclusive with the force_delete_repos and force_delete_home_dir flags. True by default for accounts SCIM API, false otherwise.
    displayName String
    This is an alias for the username that can be the full name of the user.
    externalId String
    ID of the user in an external identity provider.
    force Boolean
    forceDeleteHomeDir Boolean
    This flag determines whether the user's home directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
    forceDeleteRepos Boolean
    This flag determines whether the user's repo directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
    home String
    Home folder of the user, e.g. /Users/mr.foo@example.com.
    repos String
    Personal Repos location of the user, e.g. /Repos/mr.foo@example.com.
    workspaceAccess Boolean

    Outputs

    All input properties are implicitly available as output properties. Additionally, the User resource produces the following output properties:

    Id string
    The provider-assigned unique ID for this managed resource.
    Id string
    The provider-assigned unique ID for this managed resource.
    id String
    The provider-assigned unique ID for this managed resource.
    id string
    The provider-assigned unique ID for this managed resource.
    id str
    The provider-assigned unique ID for this managed resource.
    id String
    The provider-assigned unique ID for this managed resource.

    Look up Existing User Resource

    Get an existing User resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

    public static get(name: string, id: Input<ID>, state?: UserState, opts?: CustomResourceOptions): User
    @staticmethod
    def get(resource_name: str,
            id: str,
            opts: Optional[ResourceOptions] = None,
            acl_principal_id: Optional[str] = None,
            active: Optional[bool] = None,
            allow_cluster_create: Optional[bool] = None,
            allow_instance_pool_create: Optional[bool] = None,
            databricks_sql_access: Optional[bool] = None,
            disable_as_user_deletion: Optional[bool] = None,
            display_name: Optional[str] = None,
            external_id: Optional[str] = None,
            force: Optional[bool] = None,
            force_delete_home_dir: Optional[bool] = None,
            force_delete_repos: Optional[bool] = None,
            home: Optional[str] = None,
            repos: Optional[str] = None,
            user_name: Optional[str] = None,
            workspace_access: Optional[bool] = None) -> User
    func GetUser(ctx *Context, name string, id IDInput, state *UserState, opts ...ResourceOption) (*User, error)
    public static User Get(string name, Input<string> id, UserState? state, CustomResourceOptions? opts = null)
    public static User get(String name, Output<String> id, UserState state, CustomResourceOptions options)
    Resource lookup is not supported in YAML
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    resource_name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    The following state arguments are supported:
    AclPrincipalId string
    identifier for use in databricks_access_control_rule_set, e.g. users/mr.foo@example.com.
    Active bool
    Either user is active or not. True by default, but can be set to false in case of user deactivation with preserving user assets.
    AllowClusterCreate bool
    Allow the user to have cluster create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and cluster_id argument. Everyone without allow_cluster_create argument set, but with permission to use Cluster Policy would be able to create clusters, but within boundaries of that specific policy.
    AllowInstancePoolCreate bool
    Allow the user to have instance pool create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and instance_pool_id argument.
    DatabricksSqlAccess bool
    This is a field to allow the user to have access to Databricks SQL feature in User Interface and through databricks_sql_endpoint.
    DisableAsUserDeletion bool
    When deleting a user, set the user's active flag to false instead of actually deleting the user. This flag is mutually exclusive with the force_delete_repos and force_delete_home_dir flags. True by default for accounts SCIM API, false otherwise.
    DisplayName string
    This is an alias for the username that can be the full name of the user.
    ExternalId string
    ID of the user in an external identity provider.
    Force bool
    ForceDeleteHomeDir bool
    This flag determines whether the user's home directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
    ForceDeleteRepos bool
    This flag determines whether the user's repo directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
    Home string
    Home folder of the user, e.g. /Users/mr.foo@example.com.
    Repos string
    Personal Repos location of the user, e.g. /Repos/mr.foo@example.com.
    UserName string
    This is the username of the given user and will be their form of access and identity. Provided username will be converted to lower case if it contains upper case characters.
    WorkspaceAccess bool
    AclPrincipalId string
    identifier for use in databricks_access_control_rule_set, e.g. users/mr.foo@example.com.
    Active bool
    Either user is active or not. True by default, but can be set to false in case of user deactivation with preserving user assets.
    AllowClusterCreate bool
    Allow the user to have cluster create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and cluster_id argument. Everyone without allow_cluster_create argument set, but with permission to use Cluster Policy would be able to create clusters, but within boundaries of that specific policy.
    AllowInstancePoolCreate bool
    Allow the user to have instance pool create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and instance_pool_id argument.
    DatabricksSqlAccess bool
    This is a field to allow the user to have access to Databricks SQL feature in User Interface and through databricks_sql_endpoint.
    DisableAsUserDeletion bool
    When deleting a user, set the user's active flag to false instead of actually deleting the user. This flag is mutually exclusive with the force_delete_repos and force_delete_home_dir flags. True by default for accounts SCIM API, false otherwise.
    DisplayName string
    This is an alias for the username that can be the full name of the user.
    ExternalId string
    ID of the user in an external identity provider.
    Force bool
    ForceDeleteHomeDir bool
    This flag determines whether the user's home directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
    ForceDeleteRepos bool
    This flag determines whether the user's repo directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
    Home string
    Home folder of the user, e.g. /Users/mr.foo@example.com.
    Repos string
    Personal Repos location of the user, e.g. /Repos/mr.foo@example.com.
    UserName string
    This is the username of the given user and will be their form of access and identity. Provided username will be converted to lower case if it contains upper case characters.
    WorkspaceAccess bool
    aclPrincipalId String
    identifier for use in databricks_access_control_rule_set, e.g. users/mr.foo@example.com.
    active Boolean
    Either user is active or not. True by default, but can be set to false in case of user deactivation with preserving user assets.
    allowClusterCreate Boolean
    Allow the user to have cluster create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and cluster_id argument. Everyone without allow_cluster_create argument set, but with permission to use Cluster Policy would be able to create clusters, but within boundaries of that specific policy.
    allowInstancePoolCreate Boolean
    Allow the user to have instance pool create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and instance_pool_id argument.
    databricksSqlAccess Boolean
    This is a field to allow the user to have access to Databricks SQL feature in User Interface and through databricks_sql_endpoint.
    disableAsUserDeletion Boolean
    When deleting a user, set the user's active flag to false instead of actually deleting the user. This flag is mutually exclusive with the force_delete_repos and force_delete_home_dir flags. True by default for accounts SCIM API, false otherwise.
    displayName String
    This is an alias for the username that can be the full name of the user.
    externalId String
    ID of the user in an external identity provider.
    force Boolean
    forceDeleteHomeDir Boolean
    This flag determines whether the user's home directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
    forceDeleteRepos Boolean
    This flag determines whether the user's repo directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
    home String
    Home folder of the user, e.g. /Users/mr.foo@example.com.
    repos String
    Personal Repos location of the user, e.g. /Repos/mr.foo@example.com.
    userName String
    This is the username of the given user and will be their form of access and identity. Provided username will be converted to lower case if it contains upper case characters.
    workspaceAccess Boolean
    aclPrincipalId string
    identifier for use in databricks_access_control_rule_set, e.g. users/mr.foo@example.com.
    active boolean
    Either user is active or not. True by default, but can be set to false in case of user deactivation with preserving user assets.
    allowClusterCreate boolean
    Allow the user to have cluster create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and cluster_id argument. Everyone without allow_cluster_create argument set, but with permission to use Cluster Policy would be able to create clusters, but within boundaries of that specific policy.
    allowInstancePoolCreate boolean
    Allow the user to have instance pool create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and instance_pool_id argument.
    databricksSqlAccess boolean
    This is a field to allow the user to have access to Databricks SQL feature in User Interface and through databricks_sql_endpoint.
    disableAsUserDeletion boolean
    When deleting a user, set the user's active flag to false instead of actually deleting the user. This flag is mutually exclusive with the force_delete_repos and force_delete_home_dir flags. True by default for accounts SCIM API, false otherwise.
    displayName string
    This is an alias for the username that can be the full name of the user.
    externalId string
    ID of the user in an external identity provider.
    force boolean
    forceDeleteHomeDir boolean
    This flag determines whether the user's home directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
    forceDeleteRepos boolean
    This flag determines whether the user's repo directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
    home string
    Home folder of the user, e.g. /Users/mr.foo@example.com.
    repos string
    Personal Repos location of the user, e.g. /Repos/mr.foo@example.com.
    userName string
    This is the username of the given user and will be their form of access and identity. Provided username will be converted to lower case if it contains upper case characters.
    workspaceAccess boolean
    acl_principal_id str
    identifier for use in databricks_access_control_rule_set, e.g. users/mr.foo@example.com.
    active bool
    Either user is active or not. True by default, but can be set to false in case of user deactivation with preserving user assets.
    allow_cluster_create bool
    Allow the user to have cluster create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and cluster_id argument. Everyone without allow_cluster_create argument set, but with permission to use Cluster Policy would be able to create clusters, but within boundaries of that specific policy.
    allow_instance_pool_create bool
    Allow the user to have instance pool create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and instance_pool_id argument.
    databricks_sql_access bool
    This is a field to allow the user to have access to Databricks SQL feature in User Interface and through databricks_sql_endpoint.
    disable_as_user_deletion bool
    When deleting a user, set the user's active flag to false instead of actually deleting the user. This flag is mutually exclusive with the force_delete_repos and force_delete_home_dir flags. True by default for accounts SCIM API, false otherwise.
    display_name str
    This is an alias for the username that can be the full name of the user.
    external_id str
    ID of the user in an external identity provider.
    force bool
    force_delete_home_dir bool
    This flag determines whether the user's home directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
    force_delete_repos bool
    This flag determines whether the user's repo directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
    home str
    Home folder of the user, e.g. /Users/mr.foo@example.com.
    repos str
    Personal Repos location of the user, e.g. /Repos/mr.foo@example.com.
    user_name str
    This is the username of the given user and will be their form of access and identity. Provided username will be converted to lower case if it contains upper case characters.
    workspace_access bool
    aclPrincipalId String
    identifier for use in databricks_access_control_rule_set, e.g. users/mr.foo@example.com.
    active Boolean
    Either user is active or not. True by default, but can be set to false in case of user deactivation with preserving user assets.
    allowClusterCreate Boolean
    Allow the user to have cluster create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and cluster_id argument. Everyone without allow_cluster_create argument set, but with permission to use Cluster Policy would be able to create clusters, but within boundaries of that specific policy.
    allowInstancePoolCreate Boolean
    Allow the user to have instance pool create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and instance_pool_id argument.
    databricksSqlAccess Boolean
    This is a field to allow the user to have access to Databricks SQL feature in User Interface and through databricks_sql_endpoint.
    disableAsUserDeletion Boolean
    When deleting a user, set the user's active flag to false instead of actually deleting the user. This flag is mutually exclusive with the force_delete_repos and force_delete_home_dir flags. True by default for accounts SCIM API, false otherwise.
    displayName String
    This is an alias for the username that can be the full name of the user.
    externalId String
    ID of the user in an external identity provider.
    force Boolean
    forceDeleteHomeDir Boolean
    This flag determines whether the user's home directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
    forceDeleteRepos Boolean
    This flag determines whether the user's repo directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
    home String
    Home folder of the user, e.g. /Users/mr.foo@example.com.
    repos String
    Personal Repos location of the user, e.g. /Repos/mr.foo@example.com.
    userName String
    This is the username of the given user and will be their form of access and identity. Provided username will be converted to lower case if it contains upper case characters.
    workspaceAccess Boolean

    Import

    The resource scim user can be imported using id:

    bash

    $ pulumi import databricks:index/user:User me <user-id>
    

    Package Details

    Repository
    databricks pulumi/pulumi-databricks
    License
    Apache-2.0
    Notes
    This Pulumi package is based on the databricks Terraform Provider.
    databricks logo
    Databricks v1.33.1 published on Friday, Feb 23, 2024 by Pulumi