databricks.Credential
This resource can only be used with a workspace-level provider.
This feature is in Public Preview.
A credential represents an authentication and authorization mechanism for accessing services on your cloud tenant. Each credential is subject to Unity Catalog access-control policies that control which users and groups can access the credential.
The type of credential to be created is determined by the purpose field, which should be either SERVICE or STORAGE.
The caller must be a metastore admin or have the metastore privilege CREATE_STORAGE_CREDENTIAL for storage credentials, or CREATE_SERVICE_CREDENTIAL for service credentials. The user who creates the credential can delegate ownership to another user or group to manage permissions on it.
On AWS, the IAM role for a credential requires a trust policy; see the Databricks documentation for more details. The databricks.getAwsUnityCatalogAssumeRolePolicy data source can be used to create the necessary AWS Unity Catalog assume role policy, as sketched below.
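A rough TypeScript sketch of that flow is shown below. It assumes the @pulumi/aws provider is available; the AWS account ID, role name, and external ID are placeholders, and the data source argument names follow its documentation.
import * as aws from "@pulumi/aws";
import * as databricks from "@pulumi/databricks";
// Build the Unity Catalog assume-role policy document for the IAM role.
// The external ID normally comes from your Databricks account; a placeholder is used here.
const assumeRolePolicy = databricks.getAwsUnityCatalogAssumeRolePolicy({
    awsAccountId: "123456789012",
    roleName: "unity-catalog-credential-role",
    externalId: "00000000-0000-0000-0000-000000000000",
});
// IAM role whose trust policy is the generated assume-role policy document.
const ucRole = new aws.iam.Role("uc_credential_role", {
    name: "unity-catalog-credential-role",
    assumeRolePolicy: assumeRolePolicy.then(p => p.json),
});
// Service credential backed by that IAM role.
const serviceCred = new databricks.Credential("service_cred", {
    name: "aws_service_credential",
    purpose: "SERVICE",
    awsIamRole: {
        roleArn: ucRole.arn,
    },
});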
Example Usage
For AWS
import * as pulumi from "@pulumi/pulumi";
import * as databricks from "@pulumi/databricks";
const external = new databricks.Credential("external", {
name: externalDataAccess.name,
awsIamRole: {
roleArn: externalDataAccess.arn,
},
purpose: "SERVICE",
comment: "Managed by TF",
});
const externalCreds = new databricks.Grants("external_creds", {
credential: external.id,
grants: [{
principal: "Data Engineers",
privileges: ["ACCESS"],
}],
});
import pulumi
import pulumi_databricks as databricks
external = databricks.Credential("external",
name=external_data_access["name"],
aws_iam_role={
"role_arn": external_data_access["arn"],
},
purpose="SERVICE",
comment="Managed by TF")
external_creds = databricks.Grants("external_creds",
credential=external.id,
grants=[{
"principal": "Data Engineers",
"privileges": ["ACCESS"],
}])
package main
import (
"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
external, err := databricks.NewCredential(ctx, "external", &databricks.CredentialArgs{
Name: pulumi.Any(externalDataAccess.Name),
AwsIamRole: &databricks.CredentialAwsIamRoleArgs{
RoleArn: pulumi.Any(externalDataAccess.Arn),
},
Purpose: pulumi.String("SERVICE"),
Comment: pulumi.String("Managed by TF"),
})
if err != nil {
return err
}
_, err = databricks.NewGrants(ctx, "external_creds", &databricks.GrantsArgs{
Credential: external.ID(),
Grants: databricks.GrantsGrantArray{
&databricks.GrantsGrantArgs{
Principal: pulumi.String("Data Engineers"),
Privileges: pulumi.StringArray{
pulumi.String("ACCESS"),
},
},
},
})
if err != nil {
return err
}
return nil
})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Databricks = Pulumi.Databricks;
return await Deployment.RunAsync(() =>
{
var external = new Databricks.Credential("external", new()
{
Name = externalDataAccess.Name,
AwsIamRole = new Databricks.Inputs.CredentialAwsIamRoleArgs
{
RoleArn = externalDataAccess.Arn,
},
Purpose = "SERVICE",
Comment = "Managed by TF",
});
var externalCreds = new Databricks.Grants("external_creds", new()
{
Credential = external.Id,
GrantDetails = new[]
{
new Databricks.Inputs.GrantsGrantArgs
{
Principal = "Data Engineers",
Privileges = new[]
{
"ACCESS",
},
},
},
});
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.databricks.Credential;
import com.pulumi.databricks.CredentialArgs;
import com.pulumi.databricks.inputs.CredentialAwsIamRoleArgs;
import com.pulumi.databricks.Grants;
import com.pulumi.databricks.GrantsArgs;
import com.pulumi.databricks.inputs.GrantsGrantArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var external = new Credential("external", CredentialArgs.builder()
.name(externalDataAccess.name())
.awsIamRole(CredentialAwsIamRoleArgs.builder()
.roleArn(externalDataAccess.arn())
.build())
.purpose("SERVICE")
.comment("Managed by TF")
.build());
var externalCreds = new Grants("externalCreds", GrantsArgs.builder()
.credential(external.id())
.grants(GrantsGrantArgs.builder()
.principal("Data Engineers")
.privileges("ACCESS")
.build())
.build());
}
}
resources:
external:
type: databricks:Credential
properties:
name: ${externalDataAccess.name}
awsIamRole:
roleArn: ${externalDataAccess.arn}
purpose: SERVICE
comment: Managed by TF
externalCreds:
type: databricks:Grants
name: external_creds
properties:
credential: ${external.id}
grants:
- principal: Data Engineers
privileges:
- ACCESS
For Azure
import * as pulumi from "@pulumi/pulumi";
import * as databricks from "@pulumi/databricks";
const externalMi = new databricks.Credential("external_mi", {
name: "mi_credential",
azureManagedIdentity: {
accessConnectorId: example.id,
},
purpose: "SERVICE",
comment: "Managed identity credential managed by TF",
});
const externalCreds = new databricks.Grants("external_creds", {
credential: externalMi.id,
grants: [{
principal: "Data Engineers",
privileges: ["ACCESS"],
}],
});
import pulumi
import pulumi_databricks as databricks
external_mi = databricks.Credential("external_mi",
name="mi_credential",
azure_managed_identity={
"access_connector_id": example["id"],
},
purpose="SERVICE",
comment="Managed identity credential managed by TF")
external_creds = databricks.Grants("external_creds",
credential=external_mi.id,
grants=[{
"principal": "Data Engineers",
"privileges": ["ACCESS"],
}])
package main
import (
"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
externalMi, err := databricks.NewCredential(ctx, "external_mi", &databricks.CredentialArgs{
Name: pulumi.String("mi_credential"),
AzureManagedIdentity: &databricks.CredentialAzureManagedIdentityArgs{
AccessConnectorId: pulumi.Any(example.Id),
},
Purpose: pulumi.String("SERVICE"),
Comment: pulumi.String("Managed identity credential managed by TF"),
})
if err != nil {
return err
}
_, err = databricks.NewGrants(ctx, "external_creds", &databricks.GrantsArgs{
Credential: externalMi.ID(),
Grants: databricks.GrantsGrantArray{
&databricks.GrantsGrantArgs{
Principal: pulumi.String("Data Engineers"),
Privileges: pulumi.StringArray{
pulumi.String("ACCESS"),
},
},
},
})
if err != nil {
return err
}
return nil
})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Databricks = Pulumi.Databricks;
return await Deployment.RunAsync(() =>
{
var externalMi = new Databricks.Credential("external_mi", new()
{
Name = "mi_credential",
AzureManagedIdentity = new Databricks.Inputs.CredentialAzureManagedIdentityArgs
{
AccessConnectorId = example.Id,
},
Purpose = "SERVICE",
Comment = "Managed identity credential managed by TF",
});
var externalCreds = new Databricks.Grants("external_creds", new()
{
Credential = externalMi.Id,
GrantDetails = new[]
{
new Databricks.Inputs.GrantsGrantArgs
{
Principal = "Data Engineers",
Privileges = new[]
{
"ACCESS",
},
},
},
});
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.databricks.Credential;
import com.pulumi.databricks.CredentialArgs;
import com.pulumi.databricks.inputs.CredentialAzureManagedIdentityArgs;
import com.pulumi.databricks.Grants;
import com.pulumi.databricks.GrantsArgs;
import com.pulumi.databricks.inputs.GrantsGrantArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var externalMi = new Credential("externalMi", CredentialArgs.builder()
.name("mi_credential")
.azureManagedIdentity(CredentialAzureManagedIdentityArgs.builder()
.accessConnectorId(example.id())
.build())
.purpose("SERVICE")
.comment("Managed identity credential managed by TF")
.build());
var externalCreds = new Grants("externalCreds", GrantsArgs.builder()
.credential(externalMi.id())
.grants(GrantsGrantArgs.builder()
.principal("Data Engineers")
.privileges("ACCESS")
.build())
.build());
}
}
resources:
externalMi:
type: databricks:Credential
name: external_mi
properties:
name: mi_credential
azureManagedIdentity:
accessConnectorId: ${example.id}
purpose: SERVICE
comment: Managed identity credential managed by TF
externalCreds:
type: databricks:Grants
name: external_creds
properties:
credential: ${externalMi.id}
grants:
- principal: Data Engineers
privileges:
- ACCESS
For GCP (only applicable when purpose is STORAGE)
import * as pulumi from "@pulumi/pulumi";
import * as databricks from "@pulumi/databricks";
const externalGcpSa = new databricks.Credential("external_gcp_sa", {
name: "gcp_sa_credential",
databricksGcpServiceAccount: {},
purpose: "STORAGE",
comment: "GCP SA credential managed by TF",
});
const externalCreds = new databricks.Grants("external_creds", {
credential: externalGcpSa.id,
grants: [{
principal: "Data Engineers",
privileges: ["ACCESS"],
}],
});
import pulumi
import pulumi_databricks as databricks
external_gcp_sa = databricks.Credential("external_gcp_sa",
name="gcp_sa_credential",
databricks_gcp_service_account={},
purpose="STORAGE",
comment="GCP SA credential managed by TF")
external_creds = databricks.Grants("external_creds",
credential=external_gcp_sa.id,
grants=[{
"principal": "Data Engineers",
"privileges": ["ACCESS"],
}])
package main
import (
"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
externalGcpSa, err := databricks.NewCredential(ctx, "external_gcp_sa", &databricks.CredentialArgs{
Name: pulumi.String("gcp_sa_credential"),
DatabricksGcpServiceAccount: &databricks.CredentialDatabricksGcpServiceAccountArgs{},
Purpose: pulumi.String("STORAGE"),
Comment: pulumi.String("GCP SA credential managed by TF"),
})
if err != nil {
return err
}
_, err = databricks.NewGrants(ctx, "external_creds", &databricks.GrantsArgs{
Credential: externalGcpSa.ID(),
Grants: databricks.GrantsGrantArray{
&databricks.GrantsGrantArgs{
Principal: pulumi.String("Data Engineers"),
Privileges: pulumi.StringArray{
pulumi.String("ACCESS"),
},
},
},
})
if err != nil {
return err
}
return nil
})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Databricks = Pulumi.Databricks;
return await Deployment.RunAsync(() =>
{
var externalGcpSa = new Databricks.Credential("external_gcp_sa", new()
{
Name = "gcp_sa_credential",
DatabricksGcpServiceAccount = null,
Purpose = "STORAGE",
Comment = "GCP SA credential managed by TF",
});
var externalCreds = new Databricks.Grants("external_creds", new()
{
Credential = externalGcpSa.Id,
GrantDetails = new[]
{
new Databricks.Inputs.GrantsGrantArgs
{
Principal = "Data Engineers",
Privileges = new[]
{
"ACCESS",
},
},
},
});
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.databricks.Credential;
import com.pulumi.databricks.CredentialArgs;
import com.pulumi.databricks.inputs.CredentialDatabricksGcpServiceAccountArgs;
import com.pulumi.databricks.Grants;
import com.pulumi.databricks.GrantsArgs;
import com.pulumi.databricks.inputs.GrantsGrantArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var externalGcpSa = new Credential("externalGcpSa", CredentialArgs.builder()
.name("gcp_sa_credential")
.databricksGcpServiceAccount(CredentialDatabricksGcpServiceAccountArgs.builder().build())
.purpose("STORAGE")
.comment("GCP SA credential managed by TF")
.build());
var externalCreds = new Grants("externalCreds", GrantsArgs.builder()
.credential(externalGcpSa.id())
.grants(GrantsGrantArgs.builder()
.principal("Data Engineers")
.privileges("ACCESS")
.build())
.build());
}
}
resources:
externalGcpSa:
type: databricks:Credential
name: external_gcp_sa
properties:
name: gcp_sa_credential
databricksGcpServiceAccount: {}
purpose: STORAGE
comment: GCP SA credential managed by TF
externalCreds:
type: databricks:Grants
name: external_creds
properties:
credential: ${externalGcpSa.id}
grants:
- principal: Data Engineers
privileges:
- ACCESS
Create Credential Resource
Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.
Constructor syntax
new Credential(name: string, args: CredentialArgs, opts?: CustomResourceOptions);
@overload
def Credential(resource_name: str,
args: CredentialArgs,
opts: Optional[ResourceOptions] = None)
@overload
def Credential(resource_name: str,
opts: Optional[ResourceOptions] = None,
purpose: Optional[str] = None,
isolation_mode: Optional[str] = None,
used_for_managed_storage: Optional[bool] = None,
full_name: Optional[str] = None,
created_at: Optional[int] = None,
created_by: Optional[str] = None,
databricks_gcp_service_account: Optional[CredentialDatabricksGcpServiceAccountArgs] = None,
force_destroy: Optional[bool] = None,
force_update: Optional[bool] = None,
comment: Optional[str] = None,
azure_service_principal: Optional[CredentialAzureServicePrincipalArgs] = None,
name: Optional[str] = None,
metastore_id: Optional[str] = None,
owner: Optional[str] = None,
azure_managed_identity: Optional[CredentialAzureManagedIdentityArgs] = None,
read_only: Optional[bool] = None,
skip_validation: Optional[bool] = None,
updated_at: Optional[int] = None,
updated_by: Optional[str] = None,
aws_iam_role: Optional[CredentialAwsIamRoleArgs] = None)
func NewCredential(ctx *Context, name string, args CredentialArgs, opts ...ResourceOption) (*Credential, error)
public Credential(string name, CredentialArgs args, CustomResourceOptions? opts = null)
public Credential(String name, CredentialArgs args)
public Credential(String name, CredentialArgs args, CustomResourceOptions options)
type: databricks:Credential
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
Parameters
- name string
- The unique name of the resource.
- args CredentialArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- resource_name str
- The unique name of the resource.
- args CredentialArgs
- The arguments to resource properties.
- opts ResourceOptions
- Bag of options to control resource's behavior.
- ctx Context
- Context object for the current deployment.
- name string
- The unique name of the resource.
- args CredentialArgs
- The arguments to resource properties.
- opts ResourceOption
- Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args CredentialArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- name String
- The unique name of the resource.
- args CredentialArgs
- The arguments to resource properties.
- options CustomResourceOptions
- Bag of options to control resource's behavior.
Constructor example
The following reference example uses placeholder values for all input properties.
var credentialResource = new Databricks.Credential("credentialResource", new()
{
Purpose = "string",
IsolationMode = "string",
UsedForManagedStorage = false,
FullName = "string",
CreatedAt = 0,
CreatedBy = "string",
DatabricksGcpServiceAccount = new Databricks.Inputs.CredentialDatabricksGcpServiceAccountArgs
{
CredentialId = "string",
Email = "string",
PrivateKeyId = "string",
},
ForceDestroy = false,
ForceUpdate = false,
Comment = "string",
AzureServicePrincipal = new Databricks.Inputs.CredentialAzureServicePrincipalArgs
{
ApplicationId = "string",
ClientSecret = "string",
DirectoryId = "string",
},
Name = "string",
MetastoreId = "string",
Owner = "string",
AzureManagedIdentity = new Databricks.Inputs.CredentialAzureManagedIdentityArgs
{
AccessConnectorId = "string",
CredentialId = "string",
ManagedIdentityId = "string",
},
ReadOnly = false,
SkipValidation = false,
UpdatedAt = 0,
UpdatedBy = "string",
AwsIamRole = new Databricks.Inputs.CredentialAwsIamRoleArgs
{
ExternalId = "string",
RoleArn = "string",
UnityCatalogIamArn = "string",
},
});
example, err := databricks.NewCredential(ctx, "credentialResource", &databricks.CredentialArgs{
Purpose: pulumi.String("string"),
IsolationMode: pulumi.String("string"),
UsedForManagedStorage: pulumi.Bool(false),
FullName: pulumi.String("string"),
CreatedAt: pulumi.Int(0),
CreatedBy: pulumi.String("string"),
DatabricksGcpServiceAccount: &databricks.CredentialDatabricksGcpServiceAccountArgs{
CredentialId: pulumi.String("string"),
Email: pulumi.String("string"),
PrivateKeyId: pulumi.String("string"),
},
ForceDestroy: pulumi.Bool(false),
ForceUpdate: pulumi.Bool(false),
Comment: pulumi.String("string"),
AzureServicePrincipal: &databricks.CredentialAzureServicePrincipalArgs{
ApplicationId: pulumi.String("string"),
ClientSecret: pulumi.String("string"),
DirectoryId: pulumi.String("string"),
},
Name: pulumi.String("string"),
MetastoreId: pulumi.String("string"),
Owner: pulumi.String("string"),
AzureManagedIdentity: &databricks.CredentialAzureManagedIdentityArgs{
AccessConnectorId: pulumi.String("string"),
CredentialId: pulumi.String("string"),
ManagedIdentityId: pulumi.String("string"),
},
ReadOnly: pulumi.Bool(false),
SkipValidation: pulumi.Bool(false),
UpdatedAt: pulumi.Int(0),
UpdatedBy: pulumi.String("string"),
AwsIamRole: &databricks.CredentialAwsIamRoleArgs{
ExternalId: pulumi.String("string"),
RoleArn: pulumi.String("string"),
UnityCatalogIamArn: pulumi.String("string"),
},
})
var credentialResource = new Credential("credentialResource", CredentialArgs.builder()
.purpose("string")
.isolationMode("string")
.usedForManagedStorage(false)
.fullName("string")
.createdAt(0)
.createdBy("string")
.databricksGcpServiceAccount(CredentialDatabricksGcpServiceAccountArgs.builder()
.credentialId("string")
.email("string")
.privateKeyId("string")
.build())
.forceDestroy(false)
.forceUpdate(false)
.comment("string")
.azureServicePrincipal(CredentialAzureServicePrincipalArgs.builder()
.applicationId("string")
.clientSecret("string")
.directoryId("string")
.build())
.name("string")
.metastoreId("string")
.owner("string")
.azureManagedIdentity(CredentialAzureManagedIdentityArgs.builder()
.accessConnectorId("string")
.credentialId("string")
.managedIdentityId("string")
.build())
.readOnly(false)
.skipValidation(false)
.updatedAt(0)
.updatedBy("string")
.awsIamRole(CredentialAwsIamRoleArgs.builder()
.externalId("string")
.roleArn("string")
.unityCatalogIamArn("string")
.build())
.build());
credential_resource = databricks.Credential("credentialResource",
purpose="string",
isolation_mode="string",
used_for_managed_storage=False,
full_name="string",
created_at=0,
created_by="string",
databricks_gcp_service_account={
"credential_id": "string",
"email": "string",
"private_key_id": "string",
},
force_destroy=False,
force_update=False,
comment="string",
azure_service_principal={
"application_id": "string",
"client_secret": "string",
"directory_id": "string",
},
name="string",
metastore_id="string",
owner="string",
azure_managed_identity={
"access_connector_id": "string",
"credential_id": "string",
"managed_identity_id": "string",
},
read_only=False,
skip_validation=False,
updated_at=0,
updated_by="string",
aws_iam_role={
"external_id": "string",
"role_arn": "string",
"unity_catalog_iam_arn": "string",
})
const credentialResource = new databricks.Credential("credentialResource", {
purpose: "string",
isolationMode: "string",
usedForManagedStorage: false,
fullName: "string",
createdAt: 0,
createdBy: "string",
databricksGcpServiceAccount: {
credentialId: "string",
email: "string",
privateKeyId: "string",
},
forceDestroy: false,
forceUpdate: false,
comment: "string",
azureServicePrincipal: {
applicationId: "string",
clientSecret: "string",
directoryId: "string",
},
name: "string",
metastoreId: "string",
owner: "string",
azureManagedIdentity: {
accessConnectorId: "string",
credentialId: "string",
managedIdentityId: "string",
},
readOnly: false,
skipValidation: false,
updatedAt: 0,
updatedBy: "string",
awsIamRole: {
externalId: "string",
roleArn: "string",
unityCatalogIamArn: "string",
},
});
type: databricks:Credential
properties:
awsIamRole:
externalId: string
roleArn: string
unityCatalogIamArn: string
azureManagedIdentity:
accessConnectorId: string
credentialId: string
managedIdentityId: string
azureServicePrincipal:
applicationId: string
clientSecret: string
directoryId: string
comment: string
createdAt: 0
createdBy: string
databricksGcpServiceAccount:
credentialId: string
email: string
privateKeyId: string
forceDestroy: false
forceUpdate: false
fullName: string
isolationMode: string
metastoreId: string
name: string
owner: string
purpose: string
readOnly: false
skipValidation: false
updatedAt: 0
updatedBy: string
usedForManagedStorage: false
Credential Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
The Credential resource accepts the following input properties:
Property names are shown in camelCase; the Python SDK uses the snake_case equivalents (for example aws_iam_role).
- purpose string: Indicates the purpose of the credential. Can be SERVICE or STORAGE.
- awsIamRole CredentialAwsIamRole: Optional configuration block for credential details on AWS.
- azureManagedIdentity CredentialAzureManagedIdentity: Optional configuration block for using a managed identity as credential details on Azure (recommended over azureServicePrincipal).
- azureServicePrincipal CredentialAzureServicePrincipal: Optional configuration block for using a service principal as credential details on Azure. Only applicable when purpose is STORAGE (legacy).
- comment string
- createdAt int
- createdBy string
- databricksGcpServiceAccount CredentialDatabricksGcpServiceAccount: Optional configuration block for creating a Databricks-managed GCP service account. Only applicable when purpose is STORAGE.
- forceDestroy bool: Delete the credential regardless of its dependencies.
- forceUpdate bool: Update the credential regardless of its dependents.
- fullName string
- isolationMode string: Whether the credential is accessible from all workspaces or a specific set of workspaces. Can be ISOLATION_MODE_ISOLATED or ISOLATION_MODE_OPEN. Setting the credential to ISOLATION_MODE_ISOLATED automatically restricts access to the current workspace only (see the sketch after this list).
- metastoreId string
- name string: Name of the credential, which must be unique within the databricks_metastore. Changing the name forces creation of a new resource.
- owner string: Username, group name, or service principal application_id of the credential owner.
- readOnly bool: Indicates whether the credential is only usable for read operations. Only applicable when purpose is STORAGE.
- skipValidation bool: Suppress validation errors, if any, and force-save the credential.
- updatedAt int
- updatedBy string
- usedForManagedStorage bool
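For example, a minimal TypeScript sketch of a workspace-isolated, read-only storage credential (the Azure access connector ID is a placeholder):
import * as databricks from "@pulumi/databricks";
// Placeholder Azure Databricks Access Connector resource ID; replace with your own.
const accessConnectorId = "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg-name/providers/Microsoft.Databricks/accessConnectors/connector-name";
// A read-only storage credential that only the current workspace can use.
const isolated = new databricks.Credential("isolated", {
    name: "isolated_storage_credential",
    purpose: "STORAGE",
    readOnly: true,
    isolationMode: "ISOLATION_MODE_ISOLATED",
    azureManagedIdentity: {
        accessConnectorId: accessConnectorId,
    },
});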
Outputs
All input properties are implicitly available as output properties. Additionally, the Credential resource produces the following output properties:
- credentialId string: Unique ID of the credential.
- id string: The provider-assigned unique ID for this managed resource.
Look up Existing Credential Resource
Get an existing Credential resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: CredentialState, opts?: CustomResourceOptions): Credential
@staticmethod
def get(resource_name: str,
id: str,
opts: Optional[ResourceOptions] = None,
aws_iam_role: Optional[CredentialAwsIamRoleArgs] = None,
azure_managed_identity: Optional[CredentialAzureManagedIdentityArgs] = None,
azure_service_principal: Optional[CredentialAzureServicePrincipalArgs] = None,
comment: Optional[str] = None,
created_at: Optional[int] = None,
created_by: Optional[str] = None,
credential_id: Optional[str] = None,
databricks_gcp_service_account: Optional[CredentialDatabricksGcpServiceAccountArgs] = None,
force_destroy: Optional[bool] = None,
force_update: Optional[bool] = None,
full_name: Optional[str] = None,
isolation_mode: Optional[str] = None,
metastore_id: Optional[str] = None,
name: Optional[str] = None,
owner: Optional[str] = None,
purpose: Optional[str] = None,
read_only: Optional[bool] = None,
skip_validation: Optional[bool] = None,
updated_at: Optional[int] = None,
updated_by: Optional[str] = None,
used_for_managed_storage: Optional[bool] = None) -> Credential
func GetCredential(ctx *Context, name string, id IDInput, state *CredentialState, opts ...ResourceOption) (*Credential, error)
public static Credential Get(string name, Input<string> id, CredentialState? state, CustomResourceOptions? opts = null)
public static Credential get(String name, Output<String> id, CredentialState state, CustomResourceOptions options)
resources:
  _:
    type: databricks:Credential
    get:
      id: ${id}
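For example, a short TypeScript sketch of the lookup (the resource name and credential name below are placeholders; for this resource the provider ID is the credential name):
import * as databricks from "@pulumi/databricks";
// Look up an existing credential by its name, which serves as the provider ID.
const existing = databricks.Credential.get("existing", "external_creds");
// The looked-up resource exposes the same properties as a newly created one.
export const existingOwner = existing.owner;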
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- resource_name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
The following state properties are supported:
- awsIamRole CredentialAwsIamRole: Optional configuration block for credential details on AWS.
- azureManagedIdentity CredentialAzureManagedIdentity
- azureServicePrincipal CredentialAzureServicePrincipal
- comment string
- createdAt int
- createdBy string
- credentialId string: Unique ID of the credential.
- databricksGcpServiceAccount CredentialDatabricksGcpServiceAccount
- forceDestroy bool: Delete the credential regardless of its dependencies.
- forceUpdate bool: Update the credential regardless of its dependents.
- fullName string
- isolationMode string: Whether the credential is accessible from all workspaces or a specific set of workspaces. Can be ISOLATION_MODE_ISOLATED or ISOLATION_MODE_OPEN. Setting the credential to ISOLATION_MODE_ISOLATED automatically restricts access to the current workspace only.
- metastoreId string
- name string: Name of the credential, which must be unique within the databricks_metastore. Changing the name forces creation of a new resource.
- owner string: Username, group name, or service principal application_id of the credential owner.
- purpose string: Indicates the purpose of the credential. Can be SERVICE or STORAGE.
- readOnly bool: Indicates whether the credential is only usable for read operations. Only applicable when purpose is STORAGE.
- skipValidation bool: Suppress validation errors, if any, and force-save the credential.
- updatedAt int
- updatedBy string
- usedForManagedStorage bool
Supporting Types
CredentialAwsIamRole, CredentialAwsIamRoleArgs
- externalId string
- roleArn string: The Amazon Resource Name (ARN) of the AWS IAM role to use for the trust policy, of the form arn:aws:iam::1234567890:role/MyRole-AJJHDSKSDF.
- unityCatalogIamArn string
CredentialAzureManagedIdentity, CredentialAzureManagedIdentityArgs
- accessConnectorId string: The resource ID of the Azure Databricks Access Connector, of the form /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg-name/providers/Microsoft.Databricks/accessConnectors/connector-name.
- credentialId string: Unique ID of the credential.
- managedIdentityId string: The resource ID of the Azure user-assigned managed identity associated with the Azure Databricks Access Connector, of the form /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg-name/providers/Microsoft.ManagedIdentity/userAssignedIdentities/user-managed-identity-name.
CredentialAzureServicePrincipal, CredentialAzureServicePrincipalArgs
- applicationId string: The application ID of the application registration within the referenced AAD tenant.
- clientSecret string: The client secret generated for the above application ID in AAD. This field is redacted on output.
- directoryId string: The directory ID corresponding to the Azure Active Directory (AAD) tenant of the application.
CredentialDatabricksGcpServiceAccount, CredentialDatabricksGcpServiceAccountArgs
- credentialId string: Unique ID of the credential.
- email string: The email of the GCP service account created, to be granted access to relevant buckets.
- privateKeyId string
Import
This resource can be imported by name:
$ pulumi import databricks:index/credential:Credential this <name>
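After importing, the program needs a matching resource definition so that subsequent pulumi up runs show no diff. A minimal hypothetical TypeScript sketch (the name and purpose are placeholders that must match the real credential):
import * as databricks from "@pulumi/databricks";
// Definition corresponding to the imported credential; fill in the real values.
const thisCred = new databricks.Credential("this", {
    name: "<name>",
    purpose: "SERVICE",
});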
To learn more about importing existing cloud resources, see Importing resources.
Package Details
- Repository
- databricks pulumi/pulumi-databricks
- License
- Apache-2.0
- Notes
- This Pulumi package is based on the databricks Terraform Provider.