databricks.Permissions
Explore with Pulumi AI
Import
Import Example
Configuration file:
hcl
resource "databricks_mlflow_model" "model" {
name = "example_model"
description = "MLflow registered model"
}
resource “databricks_permissions” “model_usage” {
registered_model_id = databricks_mlflow_model.model.registered_model_id
access_control {
group_name = "users"
permission_level = "CAN_READ"
}
}
Import command:
bash
$ pulumi import databricks:index/permissions:Permissions model_usage /registered-models/<registered_model_id>
Create Permissions Resource
Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.
Constructor syntax
new Permissions(name: string, args: PermissionsArgs, opts?: CustomResourceOptions);
@overload
def Permissions(resource_name: str,
args: PermissionsArgs,
opts: Optional[ResourceOptions] = None)
@overload
def Permissions(resource_name: str,
opts: Optional[ResourceOptions] = None,
access_controls: Optional[Sequence[PermissionsAccessControlArgs]] = None,
authorization: Optional[str] = None,
cluster_id: Optional[str] = None,
cluster_policy_id: Optional[str] = None,
directory_id: Optional[str] = None,
directory_path: Optional[str] = None,
experiment_id: Optional[str] = None,
instance_pool_id: Optional[str] = None,
job_id: Optional[str] = None,
notebook_id: Optional[str] = None,
notebook_path: Optional[str] = None,
object_type: Optional[str] = None,
pipeline_id: Optional[str] = None,
registered_model_id: Optional[str] = None,
repo_id: Optional[str] = None,
repo_path: Optional[str] = None,
serving_endpoint_id: Optional[str] = None,
sql_alert_id: Optional[str] = None,
sql_dashboard_id: Optional[str] = None,
sql_endpoint_id: Optional[str] = None,
sql_query_id: Optional[str] = None,
workspace_file_id: Optional[str] = None,
workspace_file_path: Optional[str] = None)
func NewPermissions(ctx *Context, name string, args PermissionsArgs, opts ...ResourceOption) (*Permissions, error)
public Permissions(string name, PermissionsArgs args, CustomResourceOptions? opts = null)
public Permissions(String name, PermissionsArgs args)
public Permissions(String name, PermissionsArgs args, CustomResourceOptions options)
type: databricks:Permissions
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
Parameters
- name string
- The unique name of the resource.
- args PermissionsArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- resource_name str
- The unique name of the resource.
- args PermissionsArgs
- The arguments to resource properties.
- opts ResourceOptions
- Bag of options to control resource's behavior.
- ctx Context
- Context object for the current deployment.
- name string
- The unique name of the resource.
- args PermissionsArgs
- The arguments to resource properties.
- opts ResourceOption
- Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args PermissionsArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- name String
- The unique name of the resource.
- args PermissionsArgs
- The arguments to resource properties.
- options CustomResourceOptions
- Bag of options to control resource's behavior.
Example
The following reference example uses placeholder values for all input properties.
var permissionsResource = new Databricks.Permissions("permissionsResource", new()
{
AccessControls = new[]
{
new Databricks.Inputs.PermissionsAccessControlArgs
{
PermissionLevel = "string",
GroupName = "string",
ServicePrincipalName = "string",
UserName = "string",
},
},
Authorization = "string",
ClusterId = "string",
ClusterPolicyId = "string",
DirectoryId = "string",
DirectoryPath = "string",
ExperimentId = "string",
InstancePoolId = "string",
JobId = "string",
NotebookId = "string",
NotebookPath = "string",
ObjectType = "string",
PipelineId = "string",
RegisteredModelId = "string",
RepoId = "string",
RepoPath = "string",
ServingEndpointId = "string",
SqlAlertId = "string",
SqlDashboardId = "string",
SqlEndpointId = "string",
SqlQueryId = "string",
WorkspaceFileId = "string",
WorkspaceFilePath = "string",
});
example, err := databricks.NewPermissions(ctx, "permissionsResource", &databricks.PermissionsArgs{
AccessControls: databricks.PermissionsAccessControlArray{
&databricks.PermissionsAccessControlArgs{
PermissionLevel: pulumi.String("string"),
GroupName: pulumi.String("string"),
ServicePrincipalName: pulumi.String("string"),
UserName: pulumi.String("string"),
},
},
Authorization: pulumi.String("string"),
ClusterId: pulumi.String("string"),
ClusterPolicyId: pulumi.String("string"),
DirectoryId: pulumi.String("string"),
DirectoryPath: pulumi.String("string"),
ExperimentId: pulumi.String("string"),
InstancePoolId: pulumi.String("string"),
JobId: pulumi.String("string"),
NotebookId: pulumi.String("string"),
NotebookPath: pulumi.String("string"),
ObjectType: pulumi.String("string"),
PipelineId: pulumi.String("string"),
RegisteredModelId: pulumi.String("string"),
RepoId: pulumi.String("string"),
RepoPath: pulumi.String("string"),
ServingEndpointId: pulumi.String("string"),
SqlAlertId: pulumi.String("string"),
SqlDashboardId: pulumi.String("string"),
SqlEndpointId: pulumi.String("string"),
SqlQueryId: pulumi.String("string"),
WorkspaceFileId: pulumi.String("string"),
WorkspaceFilePath: pulumi.String("string"),
})
var permissionsResource = new Permissions("permissionsResource", PermissionsArgs.builder()
.accessControls(PermissionsAccessControlArgs.builder()
.permissionLevel("string")
.groupName("string")
.servicePrincipalName("string")
.userName("string")
.build())
.authorization("string")
.clusterId("string")
.clusterPolicyId("string")
.directoryId("string")
.directoryPath("string")
.experimentId("string")
.instancePoolId("string")
.jobId("string")
.notebookId("string")
.notebookPath("string")
.objectType("string")
.pipelineId("string")
.registeredModelId("string")
.repoId("string")
.repoPath("string")
.servingEndpointId("string")
.sqlAlertId("string")
.sqlDashboardId("string")
.sqlEndpointId("string")
.sqlQueryId("string")
.workspaceFileId("string")
.workspaceFilePath("string")
.build());
permissions_resource = databricks.Permissions("permissionsResource",
access_controls=[databricks.PermissionsAccessControlArgs(
permission_level="string",
group_name="string",
service_principal_name="string",
user_name="string",
)],
authorization="string",
cluster_id="string",
cluster_policy_id="string",
directory_id="string",
directory_path="string",
experiment_id="string",
instance_pool_id="string",
job_id="string",
notebook_id="string",
notebook_path="string",
object_type="string",
pipeline_id="string",
registered_model_id="string",
repo_id="string",
repo_path="string",
serving_endpoint_id="string",
sql_alert_id="string",
sql_dashboard_id="string",
sql_endpoint_id="string",
sql_query_id="string",
workspace_file_id="string",
workspace_file_path="string")
const permissionsResource = new databricks.Permissions("permissionsResource", {
accessControls: [{
permissionLevel: "string",
groupName: "string",
servicePrincipalName: "string",
userName: "string",
}],
authorization: "string",
clusterId: "string",
clusterPolicyId: "string",
directoryId: "string",
directoryPath: "string",
experimentId: "string",
instancePoolId: "string",
jobId: "string",
notebookId: "string",
notebookPath: "string",
objectType: "string",
pipelineId: "string",
registeredModelId: "string",
repoId: "string",
repoPath: "string",
servingEndpointId: "string",
sqlAlertId: "string",
sqlDashboardId: "string",
sqlEndpointId: "string",
sqlQueryId: "string",
workspaceFileId: "string",
workspaceFilePath: "string",
});
type: databricks:Permissions
properties:
accessControls:
- groupName: string
permissionLevel: string
servicePrincipalName: string
userName: string
authorization: string
clusterId: string
clusterPolicyId: string
directoryId: string
directoryPath: string
experimentId: string
instancePoolId: string
jobId: string
notebookId: string
notebookPath: string
objectType: string
pipelineId: string
registeredModelId: string
repoId: string
repoPath: string
servingEndpointId: string
sqlAlertId: string
sqlDashboardId: string
sqlEndpointId: string
sqlQueryId: string
workspaceFileId: string
workspaceFilePath: string
Permissions Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
The Permissions resource accepts the following input properties:
- AccessControls List<PermissionsAccessControl>
- Authorization string - either `tokens` or `passwords`.
- ClusterId string - cluster id
- ClusterPolicyId string - cluster policy id
- DirectoryId string - directory id
- DirectoryPath string - path of directory
- ExperimentId string - MLflow experiment id
- InstancePoolId string - instance pool id
- JobId string - job id
- NotebookId string - ID of notebook within workspace
- NotebookPath string - path of notebook
- ObjectType string - type of permissions.
- PipelineId string - pipeline id
- RegisteredModelId string - MLflow registered model id
- RepoId string - repo id
- RepoPath string - path of databricks repo directory (`/Repos/<username>/...`)
- ServingEndpointId string - Model Serving endpoint id.
- SqlAlertId string - SQL alert id
- SqlDashboardId string - SQL dashboard id
- SqlEndpointId string - SQL warehouse id
- SqlQueryId string - SQL query id
- WorkspaceFileId string
- WorkspaceFilePath string
- AccessControls []PermissionsAccessControlArgs
- Authorization string - either `tokens` or `passwords`.
- ClusterId string - cluster id
- ClusterPolicyId string - cluster policy id
- DirectoryId string - directory id
- DirectoryPath string - path of directory
- ExperimentId string - MLflow experiment id
- InstancePoolId string - instance pool id
- JobId string - job id
- NotebookId string - ID of notebook within workspace
- NotebookPath string - path of notebook
- ObjectType string - type of permissions.
- PipelineId string - pipeline id
- RegisteredModelId string - MLflow registered model id
- RepoId string - repo id
- RepoPath string - path of databricks repo directory (`/Repos/<username>/...`)
- ServingEndpointId string - Model Serving endpoint id.
- SqlAlertId string - SQL alert id
- SqlDashboardId string - SQL dashboard id
- SqlEndpointId string - SQL warehouse id
- SqlQueryId string - SQL query id
- WorkspaceFileId string
- WorkspaceFilePath string
- accessControls List<PermissionsAccessControl>
- authorization String - either `tokens` or `passwords`.
- clusterId String - cluster id
- clusterPolicyId String - cluster policy id
- directoryId String - directory id
- directoryPath String - path of directory
- experimentId String - MLflow experiment id
- instancePoolId String - instance pool id
- jobId String - job id
- notebookId String - ID of notebook within workspace
- notebookPath String - path of notebook
- objectType String - type of permissions.
- pipelineId String - pipeline id
- registeredModelId String - MLflow registered model id
- repoId String - repo id
- repoPath String - path of databricks repo directory (`/Repos/<username>/...`)
- servingEndpointId String - Model Serving endpoint id.
- sqlAlertId String - SQL alert id
- sqlDashboardId String - SQL dashboard id
- sqlEndpointId String - SQL warehouse id
- sqlQueryId String - SQL query id
- workspaceFileId String
- workspaceFilePath String
- accessControls PermissionsAccessControl[]
- authorization string - either `tokens` or `passwords`.
- clusterId string - cluster id
- clusterPolicyId string - cluster policy id
- directoryId string - directory id
- directoryPath string - path of directory
- experimentId string - MLflow experiment id
- instancePoolId string - instance pool id
- jobId string - job id
- notebookId string - ID of notebook within workspace
- notebookPath string - path of notebook
- objectType string - type of permissions.
- pipelineId string - pipeline id
- registeredModelId string - MLflow registered model id
- repoId string - repo id
- repoPath string - path of databricks repo directory (`/Repos/<username>/...`)
- servingEndpointId string - Model Serving endpoint id.
- sqlAlertId string - SQL alert id
- sqlDashboardId string - SQL dashboard id
- sqlEndpointId string - SQL warehouse id
- sqlQueryId string - SQL query id
- workspaceFileId string
- workspaceFilePath string
- access_controls Sequence[PermissionsAccessControlArgs]
- authorization str - either `tokens` or `passwords`.
- cluster_id str - cluster id
- cluster_policy_id str - cluster policy id
- directory_id str - directory id
- directory_path str - path of directory
- experiment_id str - MLflow experiment id
- instance_pool_id str - instance pool id
- job_id str - job id
- notebook_id str - ID of notebook within workspace
- notebook_path str - path of notebook
- object_type str - type of permissions.
- pipeline_id str - pipeline id
- registered_model_id str - MLflow registered model id
- repo_id str - repo id
- repo_path str - path of databricks repo directory (`/Repos/<username>/...`)
- serving_endpoint_id str - Model Serving endpoint id.
- sql_alert_id str - SQL alert id
- sql_dashboard_id str - SQL dashboard id
- sql_endpoint_id str - SQL warehouse id
- sql_query_id str - SQL query id
- workspace_file_id str
- workspace_file_path str
- accessControls List<Property Map>
- authorization String - either `tokens` or `passwords`.
- clusterId String - cluster id
- clusterPolicyId String - cluster policy id
- directoryId String - directory id
- directoryPath String - path of directory
- experimentId String - MLflow experiment id
- instancePoolId String - instance pool id
- jobId String - job id
- notebookId String - ID of notebook within workspace
- notebookPath String - path of notebook
- objectType String - type of permissions.
- pipelineId String - pipeline id
- registeredModelId String - MLflow registered model id
- repoId String - repo id
- repoPath String - path of databricks repo directory (`/Repos/<username>/...`)
- servingEndpointId String - Model Serving endpoint id.
- sqlAlertId String - SQL alert id
- sqlDashboardId String - SQL dashboard id
- sqlEndpointId String - SQL warehouse id
- sqlQueryId String - SQL query id
- workspaceFileId String
- workspaceFilePath String
Outputs
All input properties are implicitly available as output properties. Additionally, the Permissions resource produces the following output properties:
- Id string
- The provider-assigned unique ID for this managed resource.
- Id string
- The provider-assigned unique ID for this managed resource.
- id String
- The provider-assigned unique ID for this managed resource.
- id string
- The provider-assigned unique ID for this managed resource.
- id str
- The provider-assigned unique ID for this managed resource.
- id String
- The provider-assigned unique ID for this managed resource.
Look up Existing Permissions Resource
Get an existing Permissions resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: PermissionsState, opts?: CustomResourceOptions): Permissions
@staticmethod
def get(resource_name: str,
id: str,
opts: Optional[ResourceOptions] = None,
access_controls: Optional[Sequence[PermissionsAccessControlArgs]] = None,
authorization: Optional[str] = None,
cluster_id: Optional[str] = None,
cluster_policy_id: Optional[str] = None,
directory_id: Optional[str] = None,
directory_path: Optional[str] = None,
experiment_id: Optional[str] = None,
instance_pool_id: Optional[str] = None,
job_id: Optional[str] = None,
notebook_id: Optional[str] = None,
notebook_path: Optional[str] = None,
object_type: Optional[str] = None,
pipeline_id: Optional[str] = None,
registered_model_id: Optional[str] = None,
repo_id: Optional[str] = None,
repo_path: Optional[str] = None,
serving_endpoint_id: Optional[str] = None,
sql_alert_id: Optional[str] = None,
sql_dashboard_id: Optional[str] = None,
sql_endpoint_id: Optional[str] = None,
sql_query_id: Optional[str] = None,
workspace_file_id: Optional[str] = None,
workspace_file_path: Optional[str] = None) -> Permissions
func GetPermissions(ctx *Context, name string, id IDInput, state *PermissionsState, opts ...ResourceOption) (*Permissions, error)
public static Permissions Get(string name, Input<string> id, PermissionsState? state, CustomResourceOptions? opts = null)
public static Permissions get(String name, Output<String> id, PermissionsState state, CustomResourceOptions options)
Resource lookup is not supported in YAML
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- resource_name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- AccessControls List<PermissionsAccessControl>
- Authorization string - either `tokens` or `passwords`.
- ClusterId string - cluster id
- ClusterPolicyId string - cluster policy id
- DirectoryId string - directory id
- DirectoryPath string - path of directory
- ExperimentId string - MLflow experiment id
- InstancePoolId string - instance pool id
- JobId string - job id
- NotebookId string - ID of notebook within workspace
- NotebookPath string - path of notebook
- ObjectType string - type of permissions.
- PipelineId string - pipeline id
- RegisteredModelId string - MLflow registered model id
- RepoId string - repo id
- RepoPath string - path of databricks repo directory (`/Repos/<username>/...`)
- ServingEndpointId string - Model Serving endpoint id.
- SqlAlertId string - SQL alert id
- SqlDashboardId string - SQL dashboard id
- SqlEndpointId string - SQL warehouse id
- SqlQueryId string - SQL query id
- WorkspaceFileId string
- WorkspaceFilePath string
- AccessControls []PermissionsAccessControlArgs
- Authorization string - either `tokens` or `passwords`.
- ClusterId string - cluster id
- ClusterPolicyId string - cluster policy id
- DirectoryId string - directory id
- DirectoryPath string - path of directory
- ExperimentId string - MLflow experiment id
- InstancePoolId string - instance pool id
- JobId string - job id
- NotebookId string - ID of notebook within workspace
- NotebookPath string - path of notebook
- ObjectType string - type of permissions.
- PipelineId string - pipeline id
- RegisteredModelId string - MLflow registered model id
- RepoId string - repo id
- RepoPath string - path of databricks repo directory (`/Repos/<username>/...`)
- ServingEndpointId string - Model Serving endpoint id.
- SqlAlertId string - SQL alert id
- SqlDashboardId string - SQL dashboard id
- SqlEndpointId string - SQL warehouse id
- SqlQueryId string - SQL query id
- WorkspaceFileId string
- WorkspaceFilePath string
- accessControls List<PermissionsAccessControl>
- authorization String - either `tokens` or `passwords`.
- clusterId String - cluster id
- clusterPolicyId String - cluster policy id
- directoryId String - directory id
- directoryPath String - path of directory
- experimentId String - MLflow experiment id
- instancePoolId String - instance pool id
- jobId String - job id
- notebookId String - ID of notebook within workspace
- notebookPath String - path of notebook
- objectType String - type of permissions.
- pipelineId String - pipeline id
- registeredModelId String - MLflow registered model id
- repoId String - repo id
- repoPath String - path of databricks repo directory (`/Repos/<username>/...`)
- servingEndpointId String - Model Serving endpoint id.
- sqlAlertId String - SQL alert id
- sqlDashboardId String - SQL dashboard id
- sqlEndpointId String - SQL warehouse id
- sqlQueryId String - SQL query id
- workspaceFileId String
- workspaceFilePath String
- accessControls PermissionsAccessControl[]
- authorization string - either `tokens` or `passwords`.
- clusterId string - cluster id
- clusterPolicyId string - cluster policy id
- directoryId string - directory id
- directoryPath string - path of directory
- experimentId string - MLflow experiment id
- instancePoolId string - instance pool id
- jobId string - job id
- notebookId string - ID of notebook within workspace
- notebookPath string - path of notebook
- objectType string - type of permissions.
- pipelineId string - pipeline id
- registeredModelId string - MLflow registered model id
- repoId string - repo id
- repoPath string - path of databricks repo directory (`/Repos/<username>/...`)
- servingEndpointId string - Model Serving endpoint id.
- sqlAlertId string - SQL alert id
- sqlDashboardId string - SQL dashboard id
- sqlEndpointId string - SQL warehouse id
- sqlQueryId string - SQL query id
- workspaceFileId string
- workspaceFilePath string
- access_controls Sequence[PermissionsAccessControlArgs]
- authorization str - either `tokens` or `passwords`.
- cluster_id str - cluster id
- cluster_policy_id str - cluster policy id
- directory_id str - directory id
- directory_path str - path of directory
- experiment_id str - MLflow experiment id
- instance_pool_id str - instance pool id
- job_id str - job id
- notebook_id str - ID of notebook within workspace
- notebook_path str - path of notebook
- object_type str - type of permissions.
- pipeline_id str - pipeline id
- registered_model_id str - MLflow registered model id
- repo_id str - repo id
- repo_path str - path of databricks repo directory (`/Repos/<username>/...`)
- serving_endpoint_id str - Model Serving endpoint id.
- sql_alert_id str - SQL alert id
- sql_dashboard_id str - SQL dashboard id
- sql_endpoint_id str - SQL warehouse id
- sql_query_id str - SQL query id
- workspace_file_id str
- workspace_file_path str
- accessControls List<Property Map>
- authorization String - either `tokens` or `passwords`.
- clusterId String - cluster id
- clusterPolicyId String - cluster policy id
- directoryId String - directory id
- directoryPath String - path of directory
- experimentId String - MLflow experiment id
- instancePoolId String - instance pool id
- jobId String - job id
- notebookId String - ID of notebook within workspace
- notebookPath String - path of notebook
- objectType String - type of permissions.
- pipelineId String - pipeline id
- registeredModelId String - MLflow registered model id
- repoId String - repo id
- repoPath String - path of databricks repo directory (`/Repos/<username>/...`)
- servingEndpointId String - Model Serving endpoint id.
- sqlAlertId String - SQL alert id
- sqlDashboardId String - SQL dashboard id
- sqlEndpointId String - SQL warehouse id
- sqlQueryId String - SQL query id
- workspaceFileId String
- workspaceFilePath String
Supporting Types
PermissionsAccessControl, PermissionsAccessControlArgs
- PermissionLevel string - permission level according to specific resource. See examples above for the reference.
Exactly one of the below arguments is required:
- GroupName string - name of the group. We recommend setting permissions on groups.
- ServicePrincipalName string - Application ID of the service_principal.
- UserName string - name of the user.
- PermissionLevel string - permission level according to specific resource. See examples above for the reference.
Exactly one of the below arguments is required:
- GroupName string - name of the group. We recommend setting permissions on groups.
- ServicePrincipalName string - Application ID of the service_principal.
- UserName string - name of the user.
- permissionLevel String - permission level according to specific resource. See examples above for the reference.
Exactly one of the below arguments is required:
- groupName String - name of the group. We recommend setting permissions on groups.
- servicePrincipalName String - Application ID of the service_principal.
- userName String - name of the user.
- permissionLevel string - permission level according to specific resource. See examples above for the reference.
Exactly one of the below arguments is required:
- groupName string - name of the group. We recommend setting permissions on groups.
- servicePrincipalName string - Application ID of the service_principal.
- userName string - name of the user.
- permission_level str - permission level according to specific resource. See examples above for the reference.
Exactly one of the below arguments is required:
- group_name str - name of the group. We recommend setting permissions on groups.
- service_principal_name str - Application ID of the service_principal.
- user_name str - name of the user.
- permissionLevel String - permission level according to specific resource. See examples above for the reference.
Exactly one of the below arguments is required:
- groupName String - name of the group. We recommend setting permissions on groups.
- servicePrincipalName String - Application ID of the service_principal.
- userName String - name of the user.
Package Details
- Repository
- databricks pulumi/pulumi-databricks
- License
- Apache-2.0
- Notes
- This Pulumi package is based on the
databricks
Terraform Provider.