databricks.User
Explore with Pulumi AI
This resource allows you to manage users in Databricks Workspace, Databricks Account Console or Azure Databricks Account Console. You can also associate Databricks users to databricks_group. Upon user creation the user will receive a password reset email. You can also get information about caller identity using databricks.getCurrentUser data source.
Note To assign account level users to workspace use databricks_mws_permission_assignment.
To create users in the Databricks account, the provider must be configured with host = "https://accounts.cloud.databricks.com" on AWS deployments, or with host = "https://accounts.azuredatabricks.net" and AAD-token authentication on Azure deployments.
Related Resources
The following resources are often used in the same context:
- End to end workspace management guide.
- databricks.Group to manage groups in Databricks Workspace or Account Console (for AWS deployments).
- databricks.Group data to retrieve information about databricks.Group members, entitlements and instance profiles.
- databricks.GroupInstanceProfile to attach databricks.InstanceProfile (AWS) to databricks_group.
- databricks.GroupMember to attach users and groups as group members.
- databricks.InstanceProfile to manage AWS EC2 instance profiles that users can launch databricks.Cluster and access data, like databricks_mount.
- databricks.User data to retrieve information about databricks_user.
Example Usage
Creating regular user
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Databricks = Pulumi.Databricks;

return await Deployment.RunAsync(() =>
{
    // Create a workspace user; Databricks sends a password-reset email on creation.
    var me = new Databricks.User("me", new()
    {
        UserName = "me@example.com",
    });
});
package main

import (
	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		// Create a workspace user; Databricks sends a password-reset email on creation.
		_, err := databricks.NewUser(ctx, "me", &databricks.UserArgs{
			UserName: pulumi.String("me@example.com"),
		})
		if err != nil {
			return err
		}
		return nil
	})
}
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.databricks.User;
import com.pulumi.databricks.UserArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        // Create a workspace user; Databricks sends a password-reset email on creation.
        var me = new User("me", UserArgs.builder()
            .userName("me@example.com")
            .build());
    }
}
import pulumi
import pulumi_databricks as databricks

# Create a workspace user; Databricks sends a password-reset email on creation.
me = databricks.User("me", user_name="me@example.com")
import * as pulumi from "@pulumi/pulumi";
import * as databricks from "@pulumi/databricks";

// Create a workspace user; Databricks sends a password-reset email on creation.
const me = new databricks.User("me", {userName: "me@example.com"});
resources:
  # Create a workspace user; Databricks sends a password-reset email on creation.
  me:
    type: databricks:User
    properties:
      userName: me@example.com
databricks.Group in databricks.GroupMember resource
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Databricks = Pulumi.Databricks;

return await Deployment.RunAsync(() =>
{
    // Look up the built-in "admins" group.
    var admins = Databricks.GetGroup.Invoke(new()
    {
        DisplayName = "admins",
    });

    var me = new Databricks.User("me", new()
    {
        UserName = "me@example.com",
    });

    // Attach the user to the admins group.
    var i_am_admin = new Databricks.GroupMember("i-am-admin", new()
    {
        GroupId = admins.Apply(getGroupResult => getGroupResult.Id),
        MemberId = me.Id,
    });
});
package main

import (
	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		// Look up the built-in "admins" group.
		admins, err := databricks.LookupGroup(ctx, &databricks.LookupGroupArgs{
			DisplayName: "admins",
		}, nil)
		if err != nil {
			return err
		}
		me, err := databricks.NewUser(ctx, "me", &databricks.UserArgs{
			UserName: pulumi.String("me@example.com"),
		})
		if err != nil {
			return err
		}
		// Attach the user to the admins group.
		_, err = databricks.NewGroupMember(ctx, "i-am-admin", &databricks.GroupMemberArgs{
			GroupId:  *pulumi.String(admins.Id),
			MemberId: me.ID(),
		})
		if err != nil {
			return err
		}
		return nil
	})
}
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.databricks.DatabricksFunctions;
import com.pulumi.databricks.inputs.GetGroupArgs;
import com.pulumi.databricks.User;
import com.pulumi.databricks.UserArgs;
import com.pulumi.databricks.GroupMember;
import com.pulumi.databricks.GroupMemberArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        // Look up the built-in "admins" group.
        final var admins = DatabricksFunctions.getGroup(GetGroupArgs.builder()
            .displayName("admins")
            .build());

        var me = new User("me", UserArgs.builder()
            .userName("me@example.com")
            .build());

        // Attach the user to the admins group.
        var i_am_admin = new GroupMember("i-am-admin", GroupMemberArgs.builder()
            .groupId(admins.applyValue(getGroupResult -> getGroupResult.id()))
            .memberId(me.id())
            .build());
    }
}
import pulumi
import pulumi_databricks as databricks

# Look up the built-in "admins" group.
admins = databricks.get_group(display_name="admins")
me = databricks.User("me", user_name="me@example.com")
# Attach the user to the admins group.
i_am_admin = databricks.GroupMember("i-am-admin",
    group_id=admins.id,
    member_id=me.id)
import * as pulumi from "@pulumi/pulumi";
import * as databricks from "@pulumi/databricks";

// Look up the built-in "admins" group.
const admins = databricks.getGroup({
    displayName: "admins",
});
const me = new databricks.User("me", {userName: "me@example.com"});
// Attach the user to the admins group.
const i_am_admin = new databricks.GroupMember("i-am-admin", {
    groupId: admins.then(admins => admins.id),
    memberId: me.id,
});
resources:
  me:
    type: databricks:User
    properties:
      userName: me@example.com
  # Attach the user to the admins group.
  i-am-admin:
    type: databricks:GroupMember
    properties:
      groupId: ${admins.id}
      memberId: ${me.id}
variables:
  # Look up the built-in "admins" group.
  admins:
    fn::invoke:
      Function: databricks:getGroup
      Arguments:
        displayName: admins
Creating user with cluster create permissions
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Databricks = Pulumi.Databricks;

return await Deployment.RunAsync(() =>
{
    // Grant the user unrestricted cluster-create entitlement at creation time.
    var me = new Databricks.User("me", new()
    {
        AllowClusterCreate = true,
        DisplayName = "Example user",
        UserName = "me@example.com",
    });
});
package main

import (
	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		// Grant the user unrestricted cluster-create entitlement at creation time.
		_, err := databricks.NewUser(ctx, "me", &databricks.UserArgs{
			AllowClusterCreate: pulumi.Bool(true),
			DisplayName:        pulumi.String("Example user"),
			UserName:           pulumi.String("me@example.com"),
		})
		if err != nil {
			return err
		}
		return nil
	})
}
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.databricks.User;
import com.pulumi.databricks.UserArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        // Grant the user unrestricted cluster-create entitlement at creation time.
        var me = new User("me", UserArgs.builder()
            .allowClusterCreate(true)
            .displayName("Example user")
            .userName("me@example.com")
            .build());
    }
}
import pulumi
import pulumi_databricks as databricks

# Grant the user unrestricted cluster-create entitlement at creation time.
me = databricks.User("me",
    allow_cluster_create=True,
    display_name="Example user",
    user_name="me@example.com")
import * as pulumi from "@pulumi/pulumi";
import * as databricks from "@pulumi/databricks";

// Grant the user unrestricted cluster-create entitlement at creation time.
const me = new databricks.User("me", {
    allowClusterCreate: true,
    displayName: "Example user",
    userName: "me@example.com",
});
resources:
  # Grant the user unrestricted cluster-create entitlement at creation time.
  me:
    type: databricks:User
    properties:
      allowClusterCreate: true
      displayName: Example user
      userName: me@example.com
Creating user in AWS Databricks account
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Databricks = Pulumi.Databricks;

return await Deployment.RunAsync(() =>
{
    // Account credentials come from stack configuration, not unconverted
    // Terraform variables (the original `@var.*` references do not compile).
    var config = new Config();

    // initialize provider at account-level
    var mws = new Databricks.Provider("mws", new()
    {
        Host = "https://accounts.cloud.databricks.com",
        AccountId = "00000000-0000-0000-0000-000000000000",
        Username = config.Require("databricksAccountUsername"),
        Password = config.RequireSecret("databricksAccountPassword"),
    });

    // Create the user through the account-level provider declared above
    // (the original referenced a nonexistent `databricks.Mws`).
    var accountUser = new Databricks.User("accountUser", new()
    {
        UserName = "me@example.com",
        DisplayName = "Example user",
    }, new CustomResourceOptions
    {
        Provider = mws,
    });
});
package main
import (
"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
_, err := databricks.NewProvider(ctx, "mws", &databricks.ProviderArgs{
Host: pulumi.String("https://accounts.cloud.databricks.com"),
AccountId: pulumi.String("00000000-0000-0000-0000-000000000000"),
Username: pulumi.Any(_var.Databricks_account_username),
Password: pulumi.Any(_var.Databricks_account_password),
})
if err != nil {
return err
}
_, err = databricks.NewUser(ctx, "accountUser", &databricks.UserArgs{
UserName: pulumi.String("me@example.com"),
DisplayName: pulumi.String("Example user"),
}, pulumi.Provider(databricks.Mws))
if err != nil {
return err
}
return nil
})
}
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.databricks.Provider;
import com.pulumi.databricks.ProviderArgs;
import com.pulumi.databricks.User;
import com.pulumi.databricks.UserArgs;
import com.pulumi.resources.CustomResourceOptions;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        // Account credentials come from stack configuration, not the
        // undefined `var_` left over from Terraform conversion.
        var config = ctx.config();

        // initialize provider at account-level
        var mws = new Provider("mws", ProviderArgs.builder()
            .host("https://accounts.cloud.databricks.com")
            .accountId("00000000-0000-0000-0000-000000000000")
            .username(config.require("databricksAccountUsername"))
            .password(config.require("databricksAccountPassword"))
            .build());

        // Create the user through the account-level provider declared above
        // (the original referenced a nonexistent `databricks.mws()`).
        var accountUser = new User("accountUser", UserArgs.builder()
            .userName("me@example.com")
            .displayName("Example user")
            .build(), CustomResourceOptions.builder()
            .provider(mws)
            .build());
    }
}
import pulumi
import pulumi_databricks as databricks

# Account credentials come from stack configuration, not the undefined
# `var` mapping left over from Terraform conversion.
# NOTE: store databricksAccountPassword as a secret in stack config.
config = pulumi.Config()

# initialize provider at account-level
mws = databricks.Provider("mws",
    host="https://accounts.cloud.databricks.com",
    account_id="00000000-0000-0000-0000-000000000000",
    username=config.require("databricksAccountUsername"),
    password=config.require_secret("databricksAccountPassword"))

# Create the user through the account-level provider declared above
# (the original referenced a nonexistent `databricks["mws"]`).
account_user = databricks.User("accountUser",
    user_name="me@example.com",
    display_name="Example user",
    opts=pulumi.ResourceOptions(provider=mws))
import * as pulumi from "@pulumi/pulumi";
import * as databricks from "@pulumi/databricks";

// Account credentials come from stack configuration, not the undefined
// `_var` object left over from Terraform conversion.
// NOTE: store databricksAccountPassword as a secret in stack config.
const config = new pulumi.Config();

// initialize provider at account-level
const mws = new databricks.Provider("mws", {
    host: "https://accounts.cloud.databricks.com",
    accountId: "00000000-0000-0000-0000-000000000000",
    username: config.require("databricksAccountUsername"),
    password: config.requireSecret("databricksAccountPassword"),
});

// Create the user through the account-level provider declared above
// (the original referenced a nonexistent `databricks.mws`).
const accountUser = new databricks.User("accountUser", {
    userName: "me@example.com",
    displayName: "Example user",
}, {
    provider: mws,
});
# Account credentials come from stack configuration, not the unconverted
# Terraform `${var.*}` references of the original.
configuration:
  databricksAccountUsername:
    type: String
  databricksAccountPassword:
    type: String
resources:
  # initialize provider at account-level
  mws:
    type: pulumi:providers:databricks
    properties:
      host: https://accounts.cloud.databricks.com
      accountId: 00000000-0000-0000-0000-000000000000
      username: ${databricksAccountUsername}
      password: ${databricksAccountPassword}
  accountUser:
    type: databricks:User
    properties:
      userName: me@example.com
      displayName: Example user
    options:
      # Reference the provider resource declared above, not `${databricks.mws}`.
      provider: ${mws}
Creating user in Azure Databricks account
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Databricks = Pulumi.Databricks;

return await Deployment.RunAsync(() =>
{
    // initialize provider at Azure account-level
    var azureAccount = new Databricks.Provider("azureAccount", new()
    {
        Host = "https://accounts.azuredatabricks.net",
        AccountId = "00000000-0000-0000-0000-000000000000",
        AuthType = "azure-cli",
    });

    // Create the user through the account-level provider declared above
    // (the original referenced a nonexistent `databricks.Azure_account`).
    var accountUser = new Databricks.User("accountUser", new()
    {
        UserName = "me@example.com",
        DisplayName = "Example user",
    }, new CustomResourceOptions
    {
        Provider = azureAccount,
    });
});
package main
import (
"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
_, err := databricks.NewProvider(ctx, "azureAccount", &databricks.ProviderArgs{
Host: pulumi.String("https://accounts.azuredatabricks.net"),
AccountId: pulumi.String("00000000-0000-0000-0000-000000000000"),
AuthType: pulumi.String("azure-cli"),
})
if err != nil {
return err
}
_, err = databricks.NewUser(ctx, "accountUser", &databricks.UserArgs{
UserName: pulumi.String("me@example.com"),
DisplayName: pulumi.String("Example user"),
}, pulumi.Provider(databricks.Azure_account))
if err != nil {
return err
}
return nil
})
}
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.databricks.Provider;
import com.pulumi.databricks.ProviderArgs;
import com.pulumi.databricks.User;
import com.pulumi.databricks.UserArgs;
import com.pulumi.resources.CustomResourceOptions;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        // initialize provider at Azure account-level
        var azureAccount = new Provider("azureAccount", ProviderArgs.builder()
            .host("https://accounts.azuredatabricks.net")
            .accountId("00000000-0000-0000-0000-000000000000")
            .authType("azure-cli")
            .build());

        // Create the user through the account-level provider declared above
        // (the original referenced a nonexistent `databricks.azure_account()`).
        var accountUser = new User("accountUser", UserArgs.builder()
            .userName("me@example.com")
            .displayName("Example user")
            .build(), CustomResourceOptions.builder()
            .provider(azureAccount)
            .build());
    }
}
import pulumi
import pulumi_databricks as databricks

# initialize provider at Azure account-level
azure_account = databricks.Provider("azureAccount",
    host="https://accounts.azuredatabricks.net",
    account_id="00000000-0000-0000-0000-000000000000",
    auth_type="azure-cli")

# Create the user through the account-level provider declared above
# (the original referenced a nonexistent `databricks["azure_account"]`).
account_user = databricks.User("accountUser",
    user_name="me@example.com",
    display_name="Example user",
    opts=pulumi.ResourceOptions(provider=azure_account))
import * as pulumi from "@pulumi/pulumi";
import * as databricks from "@pulumi/databricks";

// initialize provider at Azure account-level
const azureAccount = new databricks.Provider("azureAccount", {
    host: "https://accounts.azuredatabricks.net",
    accountId: "00000000-0000-0000-0000-000000000000",
    authType: "azure-cli",
});

// Create the user through the account-level provider declared above
// (the original referenced a nonexistent `databricks.azure_account`).
const accountUser = new databricks.User("accountUser", {
    userName: "me@example.com",
    displayName: "Example user",
}, {
    provider: azureAccount,
});
resources:
  # initialize provider at Azure account-level
  azureAccount:
    type: pulumi:providers:databricks
    properties:
      host: https://accounts.azuredatabricks.net
      accountId: 00000000-0000-0000-0000-000000000000
      authType: azure-cli
  accountUser:
    type: databricks:User
    properties:
      userName: me@example.com
      displayName: Example user
    options:
      # Reference the provider resource declared above, not `${databricks.azure_account}`.
      provider: ${azureAccount}
Create User Resource
new User(name: string, args: UserArgs, opts?: CustomResourceOptions);
@overload
def User(resource_name: str,
opts: Optional[ResourceOptions] = None,
active: Optional[bool] = None,
allow_cluster_create: Optional[bool] = None,
allow_instance_pool_create: Optional[bool] = None,
databricks_sql_access: Optional[bool] = None,
display_name: Optional[str] = None,
external_id: Optional[str] = None,
force: Optional[bool] = None,
force_delete_home_dir: Optional[bool] = None,
force_delete_repos: Optional[bool] = None,
home: Optional[str] = None,
repos: Optional[str] = None,
user_name: Optional[str] = None,
workspace_access: Optional[bool] = None)
@overload
def User(resource_name: str,
args: UserArgs,
opts: Optional[ResourceOptions] = None)
func NewUser(ctx *Context, name string, args UserArgs, opts ...ResourceOption) (*User, error)
public User(string name, UserArgs args, CustomResourceOptions? opts = null)
type: databricks:User
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args UserArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- resource_name str
- The unique name of the resource.
- args UserArgs
- The arguments to resource properties.
- opts ResourceOptions
- Bag of options to control resource's behavior.
- ctx Context
- Context object for the current deployment.
- name string
- The unique name of the resource.
- args UserArgs
- The arguments to resource properties.
- opts ResourceOption
- Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args UserArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- name String
- The unique name of the resource.
- args UserArgs
- The arguments to resource properties.
- options CustomResourceOptions
- Bag of options to control resource's behavior.
User Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
The User resource accepts the following input properties:
- User
Name string This is the username of the given user and will be their form of access and identity.
- Active bool
Either user is active or not. True by default, but can be set to false in case of user deactivation with preserving user assets.
- Allow
Cluster boolCreate Allow the user to have cluster create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and
cluster_id
argument. Everyone without allow_cluster_create
argument set, but with permission to use Cluster Policy would be able to create clusters, but within boundaries of that specific policy.- Allow
Instance boolPool Create Allow the user to have instance pool create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and instance_pool_id argument.
- Databricks
Sql boolAccess This is a field to allow the group to have access to Databricks SQL feature in User Interface and through databricks_sql_endpoint.
- Display
Name string This is an alias for the username that can be the full name of the user.
- External
Id string ID of the user in an external identity provider.
- Force bool
- Force
Delete boolHome Dir This flag determines whether the user's home directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
- Force
Delete boolRepos This flag determines whether the user's repo directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
- Home string
Home folder of the user, e.g.
/Users/mr.foo@example.com
.- Repos string
Personal Repos location of the user, e.g.
/Repos/mr.foo@example.com
.- Workspace
Access bool
- User
Name string This is the username of the given user and will be their form of access and identity.
- Active bool
Either user is active or not. True by default, but can be set to false in case of user deactivation with preserving user assets.
- Allow
Cluster boolCreate Allow the user to have cluster create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and
cluster_id
argument. Everyone without allow_cluster_create
argument set, but with permission to use Cluster Policy would be able to create clusters, but within boundaries of that specific policy.- Allow
Instance boolPool Create Allow the user to have instance pool create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and instance_pool_id argument.
- Databricks
Sql boolAccess This is a field to allow the group to have access to Databricks SQL feature in User Interface and through databricks_sql_endpoint.
- Display
Name string This is an alias for the username that can be the full name of the user.
- External
Id string ID of the user in an external identity provider.
- Force bool
- Force
Delete boolHome Dir This flag determines whether the user's home directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
- Force
Delete boolRepos This flag determines whether the user's repo directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
- Home string
Home folder of the user, e.g.
/Users/mr.foo@example.com
.- Repos string
Personal Repos location of the user, e.g.
/Repos/mr.foo@example.com
.- Workspace
Access bool
- user
Name String This is the username of the given user and will be their form of access and identity.
- active Boolean
Either user is active or not. True by default, but can be set to false in case of user deactivation with preserving user assets.
- allow
Cluster BooleanCreate Allow the user to have cluster create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and
cluster_id
argument. Everyone without allow_cluster_create
argument set, but with permission to use Cluster Policy would be able to create clusters, but within boundaries of that specific policy.- allow
Instance BooleanPool Create Allow the user to have instance pool create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and instance_pool_id argument.
- databricks
Sql BooleanAccess This is a field to allow the group to have access to Databricks SQL feature in User Interface and through databricks_sql_endpoint.
- display
Name String This is an alias for the username that can be the full name of the user.
- external
Id String ID of the user in an external identity provider.
- force Boolean
- force
Delete BooleanHome Dir This flag determines whether the user's home directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
- force
Delete BooleanRepos This flag determines whether the user's repo directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
- home String
Home folder of the user, e.g.
/Users/mr.foo@example.com
.- repos String
Personal Repos location of the user, e.g.
/Repos/mr.foo@example.com
.- workspace
Access Boolean
- user
Name string This is the username of the given user and will be their form of access and identity.
- active boolean
Either user is active or not. True by default, but can be set to false in case of user deactivation with preserving user assets.
- allow
Cluster booleanCreate Allow the user to have cluster create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and
cluster_id
argument. Everyone without allow_cluster_create
argument set, but with permission to use Cluster Policy would be able to create clusters, but within boundaries of that specific policy.- allow
Instance booleanPool Create Allow the user to have instance pool create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and instance_pool_id argument.
- databricks
Sql booleanAccess This is a field to allow the group to have access to Databricks SQL feature in User Interface and through databricks_sql_endpoint.
- display
Name string This is an alias for the username that can be the full name of the user.
- external
Id string ID of the user in an external identity provider.
- force boolean
- force
Delete booleanHome Dir This flag determines whether the user's home directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
- force
Delete booleanRepos This flag determines whether the user's repo directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
- home string
Home folder of the user, e.g.
/Users/mr.foo@example.com
.- repos string
Personal Repos location of the user, e.g.
/Repos/mr.foo@example.com
.- workspace
Access boolean
- user_
name str This is the username of the given user and will be their form of access and identity.
- active bool
Either user is active or not. True by default, but can be set to false in case of user deactivation with preserving user assets.
- allow_
cluster_ boolcreate Allow the user to have cluster create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and
cluster_id
argument. Everyone without allow_cluster_create
argument set, but with permission to use Cluster Policy would be able to create clusters, but within boundaries of that specific policy.- allow_
instance_ boolpool_ create Allow the user to have instance pool create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and instance_pool_id argument.
- databricks_
sql_ boolaccess This is a field to allow the group to have access to Databricks SQL feature in User Interface and through databricks_sql_endpoint.
- display_
name str This is an alias for the username that can be the full name of the user.
- external_
id str ID of the user in an external identity provider.
- force bool
- force_
delete_ boolhome_ dir This flag determines whether the user's home directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
- force_
delete_ boolrepos This flag determines whether the user's repo directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
- home str
Home folder of the user, e.g.
/Users/mr.foo@example.com
.- repos str
Personal Repos location of the user, e.g.
/Repos/mr.foo@example.com
.- workspace_
access bool
- user
Name String This is the username of the given user and will be their form of access and identity.
- active Boolean
Either user is active or not. True by default, but can be set to false in case of user deactivation with preserving user assets.
- allow
Cluster BooleanCreate Allow the user to have cluster create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and
cluster_id
argument. Everyone without allow_cluster_create
argument set, but with permission to use Cluster Policy would be able to create clusters, but within boundaries of that specific policy.- allow
Instance BooleanPool Create Allow the user to have instance pool create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and instance_pool_id argument.
- databricks
Sql BooleanAccess This is a field to allow the group to have access to Databricks SQL feature in User Interface and through databricks_sql_endpoint.
- display
Name String This is an alias for the username that can be the full name of the user.
- external
Id String ID of the user in an external identity provider.
- force Boolean
- force
Delete BooleanHome Dir This flag determines whether the user's home directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
- force
Delete BooleanRepos This flag determines whether the user's repo directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
- home String
Home folder of the user, e.g.
/Users/mr.foo@example.com
.- repos String
Personal Repos location of the user, e.g.
/Repos/mr.foo@example.com
.- workspace
Access Boolean
Outputs
All input properties are implicitly available as output properties. Additionally, the User resource produces the following output properties:
- Id string
The provider-assigned unique ID for this managed resource.
- Id string
The provider-assigned unique ID for this managed resource.
- id String
The provider-assigned unique ID for this managed resource.
- id string
The provider-assigned unique ID for this managed resource.
- id str
The provider-assigned unique ID for this managed resource.
- id String
The provider-assigned unique ID for this managed resource.
Look up Existing User Resource
Get an existing User resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: UserState, opts?: CustomResourceOptions): User
@staticmethod
def get(resource_name: str,
id: str,
opts: Optional[ResourceOptions] = None,
active: Optional[bool] = None,
allow_cluster_create: Optional[bool] = None,
allow_instance_pool_create: Optional[bool] = None,
databricks_sql_access: Optional[bool] = None,
display_name: Optional[str] = None,
external_id: Optional[str] = None,
force: Optional[bool] = None,
force_delete_home_dir: Optional[bool] = None,
force_delete_repos: Optional[bool] = None,
home: Optional[str] = None,
repos: Optional[str] = None,
user_name: Optional[str] = None,
workspace_access: Optional[bool] = None) -> User
func GetUser(ctx *Context, name string, id IDInput, state *UserState, opts ...ResourceOption) (*User, error)
public static User Get(string name, Input<string> id, UserState? state, CustomResourceOptions? opts = null)
public static User get(String name, Output<String> id, UserState state, CustomResourceOptions options)
Resource lookup is not supported in YAML
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- resource_name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- Active bool
Either user is active or not. True by default, but can be set to false in case of user deactivation with preserving user assets.
- Allow
Cluster boolCreate Allow the user to have cluster create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and
cluster_id
argument. Everyone without allow_cluster_create
argument set, but with permission to use Cluster Policy would be able to create clusters, but within boundaries of that specific policy.- Allow
Instance boolPool Create Allow the user to have instance pool create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and instance_pool_id argument.
- Databricks
Sql boolAccess This is a field to allow the group to have access to Databricks SQL feature in User Interface and through databricks_sql_endpoint.
- Display
Name string This is an alias for the username that can be the full name of the user.
- External
Id string ID of the user in an external identity provider.
- Force bool
- Force
Delete boolHome Dir This flag determines whether the user's home directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
- Force
Delete boolRepos This flag determines whether the user's repo directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
- Home string
Home folder of the user, e.g.
/Users/mr.foo@example.com
.- Repos string
Personal Repos location of the user, e.g.
/Repos/mr.foo@example.com
.- User
Name string This is the username of the given user and will be their form of access and identity.
- Workspace
Access bool
- Active bool
Either user is active or not. True by default, but can be set to false in case of user deactivation with preserving user assets.
- Allow
Cluster boolCreate Allow the user to have cluster create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and
cluster_id
argument. Everyone without allow_cluster_create
argument set, but with permission to use Cluster Policy would be able to create clusters, but within boundaries of that specific policy.- Allow
Instance boolPool Create Allow the user to have instance pool create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and instance_pool_id argument.
- Databricks
Sql boolAccess This is a field to allow the group to have access to Databricks SQL feature in User Interface and through databricks_sql_endpoint.
- Display
Name string This is an alias for the username that can be the full name of the user.
- External
Id string ID of the user in an external identity provider.
- Force bool
- Force
Delete boolHome Dir This flag determines whether the user's home directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
- Force
Delete boolRepos This flag determines whether the user's repo directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
- Home string
Home folder of the user, e.g.
/Users/mr.foo@example.com
.- Repos string
Personal Repos location of the user, e.g.
/Repos/mr.foo@example.com
.- User
Name string This is the username of the given user and will be their form of access and identity.
- Workspace
Access bool
- active Boolean
Either user is active or not. True by default, but can be set to false in case of user deactivation with preserving user assets.
- allow
Cluster BooleanCreate Allow the user to have cluster create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and
cluster_id
argument. Everyone without allow_cluster_create
argument set, but with permission to use Cluster Policy would be able to create clusters, but within boundaries of that specific policy.- allow
Instance BooleanPool Create Allow the user to have instance pool create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and instance_pool_id argument.
- databricks
Sql BooleanAccess This is a field to allow the group to have access to Databricks SQL feature in User Interface and through databricks_sql_endpoint.
- display
Name String This is an alias for the username that can be the full name of the user.
- external
Id String ID of the user in an external identity provider.
- force Boolean
- force
Delete BooleanHome Dir This flag determines whether the user's home directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
- force
Delete BooleanRepos This flag determines whether the user's repo directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
- home String
Home folder of the user, e.g.
/Users/mr.foo@example.com
.- repos String
Personal Repos location of the user, e.g.
/Repos/mr.foo@example.com
.- user
Name String This is the username of the given user and will be their form of access and identity.
- workspace
Access Boolean
- active boolean
Either user is active or not. True by default, but can be set to false in case of user deactivation with preserving user assets.
- allow
Cluster booleanCreate Allow the user to have cluster create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and
cluster_id
argument. Everyone without allow_cluster_create
argument set, but with permission to use Cluster Policy would be able to create clusters, but within boundaries of that specific policy.- allow
Instance booleanPool Create Allow the user to have instance pool create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and instance_pool_id argument.
- databricks
Sql booleanAccess This is a field to allow the group to have access to Databricks SQL feature in User Interface and through databricks_sql_endpoint.
- display
Name string This is an alias for the username that can be the full name of the user.
- external
Id string ID of the user in an external identity provider.
- force boolean
- force
Delete booleanHome Dir This flag determines whether the user's home directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
- force
Delete booleanRepos This flag determines whether the user's repo directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
- home string
Home folder of the user, e.g.
/Users/mr.foo@example.com
.- repos string
Personal Repos location of the user, e.g.
/Repos/mr.foo@example.com
.- user
Name string This is the username of the given user and will be their form of access and identity.
- workspace
Access boolean
- active bool
Either user is active or not. True by default, but can be set to false in case of user deactivation with preserving user assets.
- allow_
cluster_ boolcreate Allow the user to have cluster create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and
cluster_id
argument. Everyone without allow_cluster_create
argument set, but with permission to use Cluster Policy would be able to create clusters, but within boundaries of that specific policy.- allow_
instance_ boolpool_ create Allow the user to have instance pool create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and instance_pool_id argument.
- databricks_
sql_ boolaccess This is a field to allow the group to have access to Databricks SQL feature in User Interface and through databricks_sql_endpoint.
- display_
name str This is an alias for the username that can be the full name of the user.
- external_
id str ID of the user in an external identity provider.
- force bool
- force_
delete_ boolhome_ dir This flag determines whether the user's home directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
- force_
delete_ boolrepos This flag determines whether the user's repo directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
- home str
Home folder of the user, e.g.
/Users/mr.foo@example.com
.- repos str
Personal Repos location of the user, e.g.
/Repos/mr.foo@example.com
.- user_
name str This is the username of the given user and will be their form of access and identity.
- workspace_
access bool
- active Boolean
Either user is active or not. True by default, but can be set to false in case of user deactivation with preserving user assets.
- allow
Cluster BooleanCreate Allow the user to have cluster create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and
cluster_id
argument. Everyone without allow_cluster_create
argument set, but with permission to use Cluster Policy would be able to create clusters, but within boundaries of that specific policy.- allow
Instance BooleanPool Create Allow the user to have instance pool create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and instance_pool_id argument.
- databricks
Sql BooleanAccess This is a field to allow the group to have access to Databricks SQL feature in User Interface and through databricks_sql_endpoint.
- display
Name String This is an alias for the username that can be the full name of the user.
- external
Id String ID of the user in an external identity provider.
- force Boolean
- force
Delete BooleanHome Dir This flag determines whether the user's home directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
- force
Delete BooleanRepos This flag determines whether the user's repo directory is deleted when the user is deleted. It will have no impact when in the accounts SCIM API. False by default.
- home String
Home folder of the user, e.g.
/Users/mr.foo@example.com
.- repos String
Personal Repos location of the user, e.g.
/Repos/mr.foo@example.com
.- user
Name String This is the username of the given user and will be their form of access and identity.
- workspace
Access Boolean
Import
The SCIM user resource can be imported using its id, e.g.:
$ pulumi import databricks:index/user:User me <user-id>
Package Details
- Repository
- databricks pulumi/pulumi-databricks
- License
- Apache-2.0
- Notes
This Pulumi package is based on the
databricks
Terraform Provider.