published on Monday, Mar 9, 2026 by Pulumi
Private Preview This feature is in Private Preview. Contact your Databricks representative to request access.
To work with external tables, Unity Catalog introduces two new objects to access and work with external cloud storage:
- databricks.StorageCredential represents authentication methods to access cloud storage (e.g. an IAM role for Amazon S3 or a service principal for Azure Storage). Storage credentials are access-controlled to determine which users can use the credential.
- databricks.ExternalLocation represents objects that combine a cloud storage path with a storage credential that can be used to access the location.
Example Usage
For AWS
using Pulumi;
using Databricks = Pulumi.Databricks;
// Stack that provisions a Unity Catalog storage credential backed by an AWS IAM
// role and grants the "Data Engineers" group the CREATE TABLE privilege on it.
class MyStack : Stack
{
public MyStack()
{
// Storage credential wrapping the IAM role used for external S3 data access.
// NOTE(review): `aws_iam_role.External_data_access` is a converted Terraform
// reference — replace with the actual Pulumi resource reference in real code.
var external = new Databricks.StorageCredential("external", new Databricks.StorageCredentialArgs
{
AwsIamRole = new Databricks.Inputs.StorageCredentialAwsIamRoleArgs
{
RoleArn = aws_iam_role.External_data_access.Arn,
},
Comment = "Managed by TF",
});
// Grant the group permission to create tables using this credential.
var externalCreds = new Databricks.Grants("externalCreds", new Databricks.GrantsArgs
{
StorageCredential = external.Id,
Grants =
{
new Databricks.Inputs.GrantsGrantArgs
{
Principal = "Data Engineers",
Privileges =
{
"CREATE TABLE",
},
},
},
});
}
}
package main
import (
"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
// main provisions a Unity Catalog storage credential backed by an AWS IAM role
// and grants the "Data Engineers" group the CREATE TABLE privilege on it.
func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		// Storage credential wrapping the IAM role used for external S3 data access.
		// NOTE(review): aws_iam_role.External_data_access is a converted Terraform
		// reference — replace with the actual Pulumi resource reference in real code.
		external, err := databricks.NewStorageCredential(ctx, "external", &databricks.StorageCredentialArgs{
			// Fixed: nested args types must be qualified with the databricks package.
			AwsIamRole: &databricks.StorageCredentialAwsIamRoleArgs{
				RoleArn: pulumi.Any(aws_iam_role.External_data_access.Arn),
			},
			Comment: pulumi.String("Managed by TF"),
		})
		if err != nil {
			return err
		}
		// Grant the group permission to create tables using this credential.
		_, err = databricks.NewGrants(ctx, "externalCreds", &databricks.GrantsArgs{
			StorageCredential: external.ID(),
			Grants: databricks.GrantsGrantArray{
				&databricks.GrantsGrantArgs{
					Principal: pulumi.String("Data Engineers"),
					Privileges: pulumi.StringArray{
						pulumi.String("CREATE TABLE"),
					},
				},
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}
Example coming soon!
import * as pulumi from "@pulumi/pulumi";
import * as databricks from "@pulumi/databricks";
// Storage credential wrapping the IAM role used for external S3 data access.
// NOTE(review): `aws_iam_role.external_data_access` is a converted Terraform
// reference — replace with the actual Pulumi resource reference in real code.
const external = new databricks.StorageCredential("external", {
awsIamRole: {
roleArn: aws_iam_role.external_data_access.arn,
},
comment: "Managed by TF",
});
// Grant the "Data Engineers" group permission to create tables using this credential.
const externalCreds = new databricks.Grants("externalCreds", {
storageCredential: external.id,
grants: [{
principal: "Data Engineers",
privileges: ["CREATE TABLE"],
}],
});
import pulumi
import pulumi_databricks as databricks
# Storage credential wrapping the IAM role used for external S3 data access.
# NOTE(review): `aws_iam_role["external_data_access"]` is a converted Terraform
# reference — replace with the actual Pulumi resource reference in real code.
external = databricks.StorageCredential("external",
aws_iam_role=databricks.StorageCredentialAwsIamRoleArgs(
role_arn=aws_iam_role["external_data_access"]["arn"],
),
comment="Managed by TF")
# Grant the "Data Engineers" group permission to create tables using this credential.
external_creds = databricks.Grants("externalCreds",
storage_credential=external.id,
grants=[databricks.GrantsGrantArgs(
principal="Data Engineers",
privileges=["CREATE TABLE"],
)])
Example coming soon!
For Azure
using Pulumi;
using Databricks = Pulumi.Databricks;
// Stack that provisions a Unity Catalog storage credential backed by an Azure
// service principal and grants the "Data Engineers" group CREATE TABLE on it.
class MyStack : Stack
{
public MyStack()
{
// Storage credential wrapping the AAD service principal used for storage access.
// NOTE(review): `@var.Tenant_id`, `azuread_application.Ext_cred`, and
// `azuread_application_password.Ext_cred` are converted Terraform references —
// replace with actual Pulumi config values / resource references in real code.
var external = new Databricks.StorageCredential("external", new Databricks.StorageCredentialArgs
{
AzureServicePrincipal = new Databricks.Inputs.StorageCredentialAzureServicePrincipalArgs
{
DirectoryId = @var.Tenant_id,
ApplicationId = azuread_application.Ext_cred.Application_id,
ClientSecret = azuread_application_password.Ext_cred.Value,
},
Comment = "Managed by TF",
});
// Grant the group permission to create tables using this credential.
var externalCreds = new Databricks.Grants("externalCreds", new Databricks.GrantsArgs
{
StorageCredential = external.Id,
Grants =
{
new Databricks.Inputs.GrantsGrantArgs
{
Principal = "Data Engineers",
Privileges =
{
"CREATE TABLE",
},
},
},
});
}
}
package main
import (
"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
// main provisions a Unity Catalog storage credential backed by an Azure
// service principal and grants "Data Engineers" the CREATE TABLE privilege on it.
func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		// Storage credential wrapping the AAD service principal used for storage access.
		// NOTE(review): _var.Tenant_id, azuread_application.Ext_cred, and
		// azuread_application_password.Ext_cred are converted Terraform references —
		// replace with actual Pulumi config values / resource references in real code.
		external, err := databricks.NewStorageCredential(ctx, "external", &databricks.StorageCredentialArgs{
			// Fixed: nested args types must be qualified with the databricks package.
			AzureServicePrincipal: &databricks.StorageCredentialAzureServicePrincipalArgs{
				DirectoryId:   pulumi.Any(_var.Tenant_id),
				ApplicationId: pulumi.Any(azuread_application.Ext_cred.Application_id),
				ClientSecret:  pulumi.Any(azuread_application_password.Ext_cred.Value),
			},
			Comment: pulumi.String("Managed by TF"),
		})
		if err != nil {
			return err
		}
		// Grant the group permission to create tables using this credential.
		_, err = databricks.NewGrants(ctx, "externalCreds", &databricks.GrantsArgs{
			StorageCredential: external.ID(),
			Grants: databricks.GrantsGrantArray{
				&databricks.GrantsGrantArgs{
					Principal: pulumi.String("Data Engineers"),
					Privileges: pulumi.StringArray{
						pulumi.String("CREATE TABLE"),
					},
				},
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}
Example coming soon!
import * as pulumi from "@pulumi/pulumi";
import * as databricks from "@pulumi/databricks";
// Storage credential wrapping the AAD service principal used for storage access.
// NOTE(review): `_var.tenant_id`, `azuread_application.ext_cred`, and
// `azuread_application_password.ext_cred` are converted Terraform references —
// replace with actual Pulumi config values / resource references in real code.
const external = new databricks.StorageCredential("external", {
azureServicePrincipal: {
directoryId: _var.tenant_id,
applicationId: azuread_application.ext_cred.application_id,
clientSecret: azuread_application_password.ext_cred.value,
},
comment: "Managed by TF",
});
// Grant the "Data Engineers" group permission to create tables using this credential.
const externalCreds = new databricks.Grants("externalCreds", {
storageCredential: external.id,
grants: [{
principal: "Data Engineers",
privileges: ["CREATE TABLE"],
}],
});
import pulumi
import pulumi_databricks as databricks
# Storage credential wrapping the AAD service principal used for storage access.
# NOTE(review): `var["tenant_id"]`, `azuread_application["ext_cred"]`, and
# `azuread_application_password["ext_cred"]` are converted Terraform references —
# replace with actual Pulumi config values / resource references in real code.
external = databricks.StorageCredential("external",
azure_service_principal=databricks.StorageCredentialAzureServicePrincipalArgs(
directory_id=var["tenant_id"],
application_id=azuread_application["ext_cred"]["application_id"],
client_secret=azuread_application_password["ext_cred"]["value"],
),
comment="Managed by TF")
# Grant the "Data Engineers" group permission to create tables using this credential.
external_creds = databricks.Grants("externalCreds",
storage_credential=external.id,
grants=[databricks.GrantsGrantArgs(
principal="Data Engineers",
privileges=["CREATE TABLE"],
)])
Example coming soon!
Create StorageCredential Resource
Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.
Constructor syntax
new StorageCredential(name: string, args?: StorageCredentialArgs, opts?: CustomResourceOptions);
@overload
def StorageCredential(resource_name: str,
args: Optional[StorageCredentialArgs] = None,
opts: Optional[ResourceOptions] = None)
@overload
def StorageCredential(resource_name: str,
opts: Optional[ResourceOptions] = None,
aws_iam_role: Optional[StorageCredentialAwsIamRoleArgs] = None,
azure_service_principal: Optional[StorageCredentialAzureServicePrincipalArgs] = None,
comment: Optional[str] = None,
metastore_id: Optional[str] = None,
name: Optional[str] = None)
func NewStorageCredential(ctx *Context, name string, args *StorageCredentialArgs, opts ...ResourceOption) (*StorageCredential, error)
public StorageCredential(string name, StorageCredentialArgs? args = null, CustomResourceOptions? opts = null)
public StorageCredential(String name, StorageCredentialArgs args)
public StorageCredential(String name, StorageCredentialArgs args, CustomResourceOptions options)
type: databricks:StorageCredential
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
Parameters
- name string
- The unique name of the resource.
- args StorageCredentialArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- resource_name str
- The unique name of the resource.
- args StorageCredentialArgs
- The arguments to resource properties.
- opts ResourceOptions
- Bag of options to control resource's behavior.
- ctx Context
- Context object for the current deployment.
- name string
- The unique name of the resource.
- args StorageCredentialArgs
- The arguments to resource properties.
- opts ResourceOption
- Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args StorageCredentialArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- name String
- The unique name of the resource.
- args StorageCredentialArgs
- The arguments to resource properties.
- options CustomResourceOptions
- Bag of options to control resource's behavior.
Constructor example
The following reference example uses placeholder values for all input properties.
var storageCredentialResource = new Databricks.StorageCredential("storageCredentialResource", new()
{
AwsIamRole = new Databricks.Inputs.StorageCredentialAwsIamRoleArgs
{
RoleArn = "string",
},
AzureServicePrincipal = new Databricks.Inputs.StorageCredentialAzureServicePrincipalArgs
{
ApplicationId = "string",
ClientSecret = "string",
DirectoryId = "string",
},
Comment = "string",
MetastoreId = "string",
Name = "string",
});
example, err := databricks.NewStorageCredential(ctx, "storageCredentialResource", &databricks.StorageCredentialArgs{
AwsIamRole: &databricks.StorageCredentialAwsIamRoleArgs{
RoleArn: pulumi.String("string"),
},
AzureServicePrincipal: &databricks.StorageCredentialAzureServicePrincipalArgs{
ApplicationId: pulumi.String("string"),
ClientSecret: pulumi.String("string"),
DirectoryId: pulumi.String("string"),
},
Comment: pulumi.String("string"),
MetastoreId: pulumi.String("string"),
Name: pulumi.String("string"),
})
var storageCredentialResource = new StorageCredential("storageCredentialResource", StorageCredentialArgs.builder()
.awsIamRole(StorageCredentialAwsIamRoleArgs.builder()
.roleArn("string")
.build())
.azureServicePrincipal(StorageCredentialAzureServicePrincipalArgs.builder()
.applicationId("string")
.clientSecret("string")
.directoryId("string")
.build())
.comment("string")
.metastoreId("string")
.name("string")
.build());
storage_credential_resource = databricks.StorageCredential("storageCredentialResource",
aws_iam_role={
"role_arn": "string",
},
azure_service_principal={
"application_id": "string",
"client_secret": "string",
"directory_id": "string",
},
comment="string",
metastore_id="string",
name="string")
const storageCredentialResource = new databricks.StorageCredential("storageCredentialResource", {
awsIamRole: {
roleArn: "string",
},
azureServicePrincipal: {
applicationId: "string",
clientSecret: "string",
directoryId: "string",
},
comment: "string",
metastoreId: "string",
name: "string",
});
type: databricks:StorageCredential
properties:
awsIamRole:
roleArn: string
azureServicePrincipal:
applicationId: string
clientSecret: string
directoryId: string
comment: string
metastoreId: string
name: string
StorageCredential Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
The StorageCredential resource accepts the following input properties:
- AwsIamRole StorageCredentialAwsIamRole
- AzureServicePrincipal StorageCredentialAzureServicePrincipal
- Comment string
- MetastoreId string
- Name string
- Name of Storage Credentials, which must be unique within the databricks_metastore. Change forces creation of a new resource.
- AwsIamRole StorageCredentialAwsIamRoleArgs
- AzureServicePrincipal StorageCredentialAzureServicePrincipalArgs
- Comment string
- MetastoreId string
- Name string
- Name of Storage Credentials, which must be unique within the databricks_metastore. Change forces creation of a new resource.
- awsIamRole StorageCredentialAwsIamRole
- azureServicePrincipal StorageCredentialAzureServicePrincipal
- comment String
- metastoreId String
- name String
- Name of Storage Credentials, which must be unique within the databricks_metastore. Change forces creation of a new resource.
- awsIamRole StorageCredentialAwsIamRole
- azureServicePrincipal StorageCredentialAzureServicePrincipal
- comment string
- metastoreId string
- name string
- Name of Storage Credentials, which must be unique within the databricks_metastore. Change forces creation of a new resource.
- aws_iam_role StorageCredentialAwsIamRoleArgs
- azure_service_principal StorageCredentialAzureServicePrincipalArgs
- comment str
- metastore_id str
- name str
- Name of Storage Credentials, which must be unique within the databricks_metastore. Change forces creation of a new resource.
- awsIamRole Property Map
- azureServicePrincipal Property Map
- comment String
- metastoreId String
- name String
- Name of Storage Credentials, which must be unique within the databricks_metastore. Change forces creation of a new resource.
Outputs
All input properties are implicitly available as output properties. Additionally, the StorageCredential resource produces the following output properties:
- Id string
- The provider-assigned unique ID for this managed resource.
- Id string
- The provider-assigned unique ID for this managed resource.
- id String
- The provider-assigned unique ID for this managed resource.
- id string
- The provider-assigned unique ID for this managed resource.
- id str
- The provider-assigned unique ID for this managed resource.
- id String
- The provider-assigned unique ID for this managed resource.
Look up Existing StorageCredential Resource
Get an existing StorageCredential resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: StorageCredentialState, opts?: CustomResourceOptions): StorageCredential@staticmethod
def get(resource_name: str,
id: str,
opts: Optional[ResourceOptions] = None,
aws_iam_role: Optional[StorageCredentialAwsIamRoleArgs] = None,
azure_service_principal: Optional[StorageCredentialAzureServicePrincipalArgs] = None,
comment: Optional[str] = None,
metastore_id: Optional[str] = None,
name: Optional[str] = None) -> StorageCredential
func GetStorageCredential(ctx *Context, name string, id IDInput, state *StorageCredentialState, opts ...ResourceOption) (*StorageCredential, error)
public static StorageCredential Get(string name, Input<string> id, StorageCredentialState? state, CustomResourceOptions? opts = null)
public static StorageCredential get(String name, Output<String> id, StorageCredentialState state, CustomResourceOptions options)
resources:
  _:
    type: databricks:StorageCredential
    get:
      id: ${id}
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- resource_name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- AwsIamRole StorageCredentialAwsIamRole
- AzureServicePrincipal StorageCredentialAzureServicePrincipal
- Comment string
- MetastoreId string
- Name string
- Name of Storage Credentials, which must be unique within the databricks_metastore. Change forces creation of a new resource.
- AwsIamRole StorageCredentialAwsIamRoleArgs
- AzureServicePrincipal StorageCredentialAzureServicePrincipalArgs
- Comment string
- MetastoreId string
- Name string
- Name of Storage Credentials, which must be unique within the databricks_metastore. Change forces creation of a new resource.
- awsIamRole StorageCredentialAwsIamRole
- azureServicePrincipal StorageCredentialAzureServicePrincipal
- comment String
- metastoreId String
- name String
- Name of Storage Credentials, which must be unique within the databricks_metastore. Change forces creation of a new resource.
- awsIamRole StorageCredentialAwsIamRole
- azureServicePrincipal StorageCredentialAzureServicePrincipal
- comment string
- metastoreId string
- name string
- Name of Storage Credentials, which must be unique within the databricks_metastore. Change forces creation of a new resource.
- aws_iam_role StorageCredentialAwsIamRoleArgs
- azure_service_principal StorageCredentialAzureServicePrincipalArgs
- comment str
- metastore_id str
- name str
- Name of Storage Credentials, which must be unique within the databricks_metastore. Change forces creation of a new resource.
- awsIamRole Property Map
- azureServicePrincipal Property Map
- comment String
- metastoreId String
- name String
- Name of Storage Credentials, which must be unique within the databricks_metastore. Change forces creation of a new resource.
Supporting Types
StorageCredentialAwsIamRole, StorageCredentialAwsIamRoleArgs
- RoleArn string
- The Amazon Resource Name (ARN) of the AWS IAM role for S3 data access, of the form arn:aws:iam::1234567890:role/MyRole-AJJHDSKSDF
- RoleArn string
- The Amazon Resource Name (ARN) of the AWS IAM role for S3 data access, of the form arn:aws:iam::1234567890:role/MyRole-AJJHDSKSDF
- roleArn String
- The Amazon Resource Name (ARN) of the AWS IAM role for S3 data access, of the form arn:aws:iam::1234567890:role/MyRole-AJJHDSKSDF
- roleArn string
- The Amazon Resource Name (ARN) of the AWS IAM role for S3 data access, of the form arn:aws:iam::1234567890:role/MyRole-AJJHDSKSDF
- role_arn str
- The Amazon Resource Name (ARN) of the AWS IAM role for S3 data access, of the form arn:aws:iam::1234567890:role/MyRole-AJJHDSKSDF
- roleArn String
- The Amazon Resource Name (ARN) of the AWS IAM role for S3 data access, of the form arn:aws:iam::1234567890:role/MyRole-AJJHDSKSDF
StorageCredentialAzureServicePrincipal, StorageCredentialAzureServicePrincipalArgs
- ApplicationId string
- The application ID of the application registration within the referenced AAD tenant
- ClientSecret string
- The client secret generated for the above app ID in AAD. This field is redacted on output
- DirectoryId string
- The directory ID corresponding to the Azure Active Directory (AAD) tenant of the application
- ApplicationId string
- The application ID of the application registration within the referenced AAD tenant
- ClientSecret string
- The client secret generated for the above app ID in AAD. This field is redacted on output
- DirectoryId string
- The directory ID corresponding to the Azure Active Directory (AAD) tenant of the application
- applicationId String
- The application ID of the application registration within the referenced AAD tenant
- clientSecret String
- The client secret generated for the above app ID in AAD. This field is redacted on output
- directoryId String
- The directory ID corresponding to the Azure Active Directory (AAD) tenant of the application
- applicationId string
- The application ID of the application registration within the referenced AAD tenant
- clientSecret string
- The client secret generated for the above app ID in AAD. This field is redacted on output
- directoryId string
- The directory ID corresponding to the Azure Active Directory (AAD) tenant of the application
- application_id str
- The application ID of the application registration within the referenced AAD tenant
- client_secret str
- The client secret generated for the above app ID in AAD. This field is redacted on output
- directory_id str
- The directory ID corresponding to the Azure Active Directory (AAD) tenant of the application
- applicationId String
- The application ID of the application registration within the referenced AAD tenant
- clientSecret String
- The client secret generated for the above app ID in AAD. This field is redacted on output
- directoryId String
- The directory ID corresponding to the Azure Active Directory (AAD) tenant of the application
Import
This resource can be imported by name using the following command:
$ pulumi import databricks:index/storageCredential:StorageCredential this <name>
To learn more about importing existing cloud resources, see Importing resources.
Package Details
- Repository
- databricks pulumi/pulumi-databricks
- License
- Apache-2.0
- Notes
- This Pulumi package is based on the
databricksTerraform Provider.
published on Monday, Mar 9, 2026 by Pulumi
