Databricks v1.14.0, May 23, 2023

databricks.StorageCredential

To work with external tables, Unity Catalog introduces two new objects for accessing and working with external cloud storage:

  • databricks.StorageCredential represents authentication methods to access cloud storage (e.g., an IAM role for Amazon S3 or a service principal/managed identity for Azure Storage). Storage credentials are access-controlled to determine which users can use the credential.
  • databricks.ExternalLocation represents an object that combines a cloud storage path with a storage credential that can be used to access the location (see the sketch after this list).
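
The two are typically used together. A minimal TypeScript sketch of how they fit (the IAM role ARN and bucket path are illustrative placeholders):

import * as databricks from "@pulumi/databricks";

// A storage credential wrapping an AWS IAM role (see the full AWS example below).
const external = new databricks.StorageCredential("external", {
    awsIamRole: {
        roleArn: "arn:aws:iam::1234567890:role/MyRole-AJJHDSKSDF", // illustrative ARN
    },
});

// An external location combines a cloud storage path with that credential.
const someLocation = new databricks.ExternalLocation("some", {
    name: "external",
    url: "s3://my-bucket/some-path", // illustrative path
    credentialName: external.id,
});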

Example Usage

For AWS

using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Databricks = Pulumi.Databricks;

return await Deployment.RunAsync(() => 
{
    var external = new Databricks.StorageCredential("external", new()
    {
        AwsIamRole = new Databricks.Inputs.StorageCredentialAwsIamRoleArgs
        {
            RoleArn = aws_iam_role.External_data_access.Arn,
        },
        Comment = "Managed by TF",
    });

    var externalCreds = new Databricks.Grants("externalCreds", new()
    {
        StorageCredential = external.Id,
        GrantDetails = new[]
        {
            new Databricks.Inputs.GrantsGrantArgs
            {
                Principal = "Data Engineers",
                Privileges = new[]
                {
                    "CREATE_TABLE",
                },
            },
        },
    });

});
package main

import (
	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		external, err := databricks.NewStorageCredential(ctx, "external", &databricks.StorageCredentialArgs{
			AwsIamRole: &databricks.StorageCredentialAwsIamRoleArgs{
				RoleArn: pulumi.Any(aws_iam_role.External_data_access.Arn),
			},
			Comment: pulumi.String("Managed by TF"),
		})
		if err != nil {
			return err
		}
		_, err = databricks.NewGrants(ctx, "externalCreds", &databricks.GrantsArgs{
			StorageCredential: external.ID(),
			Grants: databricks.GrantsGrantArray{
				&databricks.GrantsGrantArgs{
					Principal: pulumi.String("Data Engineers"),
					Privileges: pulumi.StringArray{
						pulumi.String("CREATE_TABLE"),
					},
				},
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.databricks.StorageCredential;
import com.pulumi.databricks.StorageCredentialArgs;
import com.pulumi.databricks.inputs.StorageCredentialAwsIamRoleArgs;
import com.pulumi.databricks.Grants;
import com.pulumi.databricks.GrantsArgs;
import com.pulumi.databricks.inputs.GrantsGrantArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var external = new StorageCredential("external", StorageCredentialArgs.builder()        
            .awsIamRole(StorageCredentialAwsIamRoleArgs.builder()
                .roleArn(aws_iam_role.external_data_access().arn())
                .build())
            .comment("Managed by TF")
            .build());

        var externalCreds = new Grants("externalCreds", GrantsArgs.builder()        
            .storageCredential(external.id())
            .grants(GrantsGrantArgs.builder()
                .principal("Data Engineers")
                .privileges("CREATE_TABLE")
                .build())
            .build());

    }
}
import pulumi
import pulumi_databricks as databricks

external = databricks.StorageCredential("external",
    aws_iam_role=databricks.StorageCredentialAwsIamRoleArgs(
        role_arn=aws_iam_role["external_data_access"]["arn"],
    ),
    comment="Managed by TF")
external_creds = databricks.Grants("externalCreds",
    storage_credential=external.id,
    grants=[databricks.GrantsGrantArgs(
        principal="Data Engineers",
        privileges=["CREATE_TABLE"],
    )])
import * as pulumi from "@pulumi/pulumi";
import * as databricks from "@pulumi/databricks";

const external = new databricks.StorageCredential("external", {
    awsIamRole: {
        roleArn: aws_iam_role.external_data_access.arn,
    },
    comment: "Managed by TF",
});
const externalCreds = new databricks.Grants("externalCreds", {
    storageCredential: external.id,
    grants: [{
        principal: "Data Engineers",
        privileges: ["CREATE_TABLE"],
    }],
});
resources:
  external:
    type: databricks:StorageCredential
    properties:
      awsIamRole:
        roleArn: ${aws_iam_role.external_data_access.arn}
      comment: Managed by TF
  externalCreds:
    type: databricks:Grants
    properties:
      storageCredential: ${external.id}
      grants:
        - principal: Data Engineers
          privileges:
            - CREATE_TABLE

For Azure

using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Databricks = Pulumi.Databricks;

return await Deployment.RunAsync(() => 
{
    var externalMi = new Databricks.StorageCredential("externalMi", new()
    {
        AzureManagedIdentity = new Databricks.Inputs.StorageCredentialAzureManagedIdentityArgs
        {
            AccessConnectorId = azurerm_databricks_access_connector.Example.Id,
        },
        Comment = "Managed identity credential managed by TF",
    });

    var externalCreds = new Databricks.Grants("externalCreds", new()
    {
        StorageCredential = databricks_storage_credential.External.Id,
        GrantDetails = new[]
        {
            new Databricks.Inputs.GrantsGrantArgs
            {
                Principal = "Data Engineers",
                Privileges = new[]
                {
                    "CREATE_TABLE",
                },
            },
        },
    });

});
package main

import (
	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := databricks.NewStorageCredential(ctx, "externalMi", &databricks.StorageCredentialArgs{
			AzureManagedIdentity: &databricks.StorageCredentialAzureManagedIdentityArgs{
				AccessConnectorId: pulumi.Any(azurerm_databricks_access_connector.Example.Id),
			},
			Comment: pulumi.String("Managed identity credential managed by TF"),
		})
		if err != nil {
			return err
		}
		_, err = databricks.NewGrants(ctx, "externalCreds", &databricks.GrantsArgs{
			StorageCredential: pulumi.Any(databricks_storage_credential.External.Id),
			Grants: databricks.GrantsGrantArray{
				&databricks.GrantsGrantArgs{
					Principal: pulumi.String("Data Engineers"),
					Privileges: pulumi.StringArray{
						pulumi.String("CREATE_TABLE"),
					},
				},
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.databricks.StorageCredential;
import com.pulumi.databricks.StorageCredentialArgs;
import com.pulumi.databricks.inputs.StorageCredentialAzureManagedIdentityArgs;
import com.pulumi.databricks.Grants;
import com.pulumi.databricks.GrantsArgs;
import com.pulumi.databricks.inputs.GrantsGrantArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var externalMi = new StorageCredential("externalMi", StorageCredentialArgs.builder()        
            .azureManagedIdentity(StorageCredentialAzureManagedIdentityArgs.builder()
                .accessConnectorId(azurerm_databricks_access_connector.example().id())
                .build())
            .comment("Managed identity credential managed by TF")
            .build());

        var externalCreds = new Grants("externalCreds", GrantsArgs.builder()        
            .storageCredential(databricks_storage_credential.external().id())
            .grants(GrantsGrantArgs.builder()
                .principal("Data Engineers")
                .privileges("CREATE_TABLE")
                .build())
            .build());

    }
}
import pulumi
import pulumi_databricks as databricks

external_mi = databricks.StorageCredential("externalMi",
    azure_managed_identity=databricks.StorageCredentialAzureManagedIdentityArgs(
        access_connector_id=azurerm_databricks_access_connector["example"]["id"],
    ),
    comment="Managed identity credential managed by TF")
external_creds = databricks.Grants("externalCreds",
    storage_credential=databricks_storage_credential["external"]["id"],
    grants=[databricks.GrantsGrantArgs(
        principal="Data Engineers",
        privileges=["CREATE_TABLE"],
    )])
import * as pulumi from "@pulumi/pulumi";
import * as databricks from "@pulumi/databricks";

const externalMi = new databricks.StorageCredential("externalMi", {
    azureManagedIdentity: {
        accessConnectorId: azurerm_databricks_access_connector.example.id,
    },
    comment: "Managed identity credential managed by TF",
});
const externalCreds = new databricks.Grants("externalCreds", {
    storageCredential: databricks_storage_credential.external.id,
    grants: [{
        principal: "Data Engineers",
        privileges: ["CREATE_TABLE"],
    }],
});
resources:
  externalMi:
    type: databricks:StorageCredential
    properties:
      azureManagedIdentity:
        accessConnectorId: ${azurerm_databricks_access_connector.example.id}
      comment: Managed identity credential managed by TF
  externalCreds:
    type: databricks:Grants
    properties:
      storageCredential: ${databricks_storage_credential.external.id}
      grants:
        - principal: Data Engineers
          privileges:
            - CREATE_TABLE

For GCP

using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Databricks = Pulumi.Databricks;

return await Deployment.RunAsync(() => 
{
    var external = new Databricks.StorageCredential("external", new()
    {
        DatabricksGcpServiceAccount = new Databricks.Inputs.StorageCredentialDatabricksGcpServiceAccountArgs(),
    });

    var externalCreds = new Databricks.Grants("externalCreds", new()
    {
        StorageCredential = external.Id,
        GrantDetails = new[]
        {
            new Databricks.Inputs.GrantsGrantArgs
            {
                Principal = "Data Engineers",
                Privileges = new[]
                {
                    "CREATE_TABLE",
                },
            },
        },
    });

});
package main

import (
	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		external, err := databricks.NewStorageCredential(ctx, "external", &databricks.StorageCredentialArgs{
			DatabricksGcpServiceAccount: &databricks.StorageCredentialDatabricksGcpServiceAccountArgs{},
		})
		if err != nil {
			return err
		}
		_, err = databricks.NewGrants(ctx, "externalCreds", &databricks.GrantsArgs{
			StorageCredential: external.ID(),
			Grants: databricks.GrantsGrantArray{
				&databricks.GrantsGrantArgs{
					Principal: pulumi.String("Data Engineers"),
					Privileges: pulumi.StringArray{
						pulumi.String("CREATE_TABLE"),
					},
				},
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.databricks.StorageCredential;
import com.pulumi.databricks.StorageCredentialArgs;
import com.pulumi.databricks.inputs.StorageCredentialDatabricksGcpServiceAccountArgs;
import com.pulumi.databricks.Grants;
import com.pulumi.databricks.GrantsArgs;
import com.pulumi.databricks.inputs.GrantsGrantArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var external = new StorageCredential("external", StorageCredentialArgs.builder()        
            .databricksGcpServiceAccount(StorageCredentialDatabricksGcpServiceAccountArgs.builder().build())
            .build());

        var externalCreds = new Grants("externalCreds", GrantsArgs.builder()        
            .storageCredential(external.id())
            .grants(GrantsGrantArgs.builder()
                .principal("Data Engineers")
                .privileges("CREATE_TABLE")
                .build())
            .build());

    }
}
import pulumi
import pulumi_databricks as databricks

external = databricks.StorageCredential("external", databricks_gcp_service_account=databricks.StorageCredentialDatabricksGcpServiceAccountArgs())
external_creds = databricks.Grants("externalCreds",
    storage_credential=external.id,
    grants=[databricks.GrantsGrantArgs(
        principal="Data Engineers",
        privileges=["CREATE_TABLE"],
    )])
import * as pulumi from "@pulumi/pulumi";
import * as databricks from "@pulumi/databricks";

const external = new databricks.StorageCredential("external", {databricksGcpServiceAccount: {}});
const externalCreds = new databricks.Grants("externalCreds", {
    storageCredential: external.id,
    grants: [{
        principal: "Data Engineers",
        privileges: ["CREATE_TABLE"],
    }],
});
resources:
  external:
    type: databricks:StorageCredential
    properties:
      databricksGcpServiceAccount: {}
  externalCreds:
    type: databricks:Grants
    properties:
      storageCredential: ${external.id}
      grants:
        - principal: Data Engineers
          privileges:
            - CREATE_TABLE

Create StorageCredential Resource

new StorageCredential(name: string, args?: StorageCredentialArgs, opts?: CustomResourceOptions);
@overload
def StorageCredential(resource_name: str,
                      opts: Optional[ResourceOptions] = None,
                      aws_iam_role: Optional[StorageCredentialAwsIamRoleArgs] = None,
                      azure_managed_identity: Optional[StorageCredentialAzureManagedIdentityArgs] = None,
                      azure_service_principal: Optional[StorageCredentialAzureServicePrincipalArgs] = None,
                      comment: Optional[str] = None,
                      databricks_gcp_service_account: Optional[StorageCredentialDatabricksGcpServiceAccountArgs] = None,
                      gcp_service_account_key: Optional[StorageCredentialGcpServiceAccountKeyArgs] = None,
                      metastore_id: Optional[str] = None,
                      name: Optional[str] = None,
                      owner: Optional[str] = None)
@overload
def StorageCredential(resource_name: str,
                      args: Optional[StorageCredentialArgs] = None,
                      opts: Optional[ResourceOptions] = None)
func NewStorageCredential(ctx *Context, name string, args *StorageCredentialArgs, opts ...ResourceOption) (*StorageCredential, error)
public StorageCredential(string name, StorageCredentialArgs? args = null, CustomResourceOptions? opts = null)
public StorageCredential(String name, StorageCredentialArgs args)
public StorageCredential(String name, StorageCredentialArgs args, CustomResourceOptions options)
type: databricks:StorageCredential
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.

name string
The unique name of the resource.
args StorageCredentialArgs
The arguments to resource properties.
opts CustomResourceOptions
Bag of options to control resource's behavior.
resource_name str
The unique name of the resource.
args StorageCredentialArgs
The arguments to resource properties.
opts ResourceOptions
Bag of options to control resource's behavior.
ctx Context
Context object for the current deployment.
name string
The unique name of the resource.
args StorageCredentialArgs
The arguments to resource properties.
opts ResourceOption
Bag of options to control resource's behavior.
name string
The unique name of the resource.
args StorageCredentialArgs
The arguments to resource properties.
opts CustomResourceOptions
Bag of options to control resource's behavior.
name String
The unique name of the resource.
args StorageCredentialArgs
The arguments to resource properties.
options CustomResourceOptions
Bag of options to control resource's behavior.

StorageCredential Resource Properties

To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

Inputs

The StorageCredential resource accepts the following input properties:

AwsIamRole StorageCredentialAwsIamRoleArgs

Optional configuration block for credential details for AWS.

AzureManagedIdentity StorageCredentialAzureManagedIdentityArgs

Optional configuration block for using a managed identity as credential details for Azure (recommended over a service principal).

AzureServicePrincipal StorageCredentialAzureServicePrincipalArgs

Optional configuration block for using a service principal as credential details for Azure.

Comment string
DatabricksGcpServiceAccount StorageCredentialDatabricksGcpServiceAccountArgs

Optional configuration block for creating a Databricks-managed GCP Service Account.

GcpServiceAccountKey StorageCredentialGcpServiceAccountKeyArgs
MetastoreId string
Name string

Name of Storage Credentials, which must be unique within the databricks_metastore. Change forces creation of a new resource.

Owner string

Username/groupname/sp application_id of the storage credential owner.

AwsIamRole StorageCredentialAwsIamRoleArgs

Optional configuration block for credential details for AWS.

AzureManagedIdentity StorageCredentialAzureManagedIdentityArgs

Optional configuration block for using a managed identity as credential details for Azure (recommended over a service principal).

AzureServicePrincipal StorageCredentialAzureServicePrincipalArgs

Optional configuration block for using a service principal as credential details for Azure.

Comment string
DatabricksGcpServiceAccount StorageCredentialDatabricksGcpServiceAccountArgs

Optional configuration block for creating a Databricks-managed GCP Service Account.

GcpServiceAccountKey StorageCredentialGcpServiceAccountKeyArgs
MetastoreId string
Name string

Name of Storage Credentials, which must be unique within the databricks_metastore. Change forces creation of a new resource.

Owner string

Username/groupname/sp application_id of the storage credential owner.

awsIamRole StorageCredentialAwsIamRoleArgs

Optional configuration block for credential details for AWS.

azureManagedIdentity StorageCredentialAzureManagedIdentityArgs

Optional configuration block for using a managed identity as credential details for Azure (recommended over a service principal).

azureServicePrincipal StorageCredentialAzureServicePrincipalArgs

Optional configuration block for using a service principal as credential details for Azure.

comment String
databricksGcpServiceAccount StorageCredentialDatabricksGcpServiceAccountArgs

Optional configuration block for creating a Databricks-managed GCP Service Account.

gcpServiceAccountKey StorageCredentialGcpServiceAccountKeyArgs
metastoreId String
name String

Name of Storage Credentials, which must be unique within the databricks_metastore. Change forces creation of a new resource.

owner String

Username/groupname/sp application_id of the storage credential owner.

awsIamRole StorageCredentialAwsIamRoleArgs

Optional configuration block for credential details for AWS.

azureManagedIdentity StorageCredentialAzureManagedIdentityArgs

Optional configuration block for using a managed identity as credential details for Azure (recommended over a service principal).

azureServicePrincipal StorageCredentialAzureServicePrincipalArgs

Optional configuration block for using a service principal as credential details for Azure.

comment string
databricksGcpServiceAccount StorageCredentialDatabricksGcpServiceAccountArgs

Optional configuration block for creating a Databricks-managed GCP Service Account.

gcpServiceAccountKey StorageCredentialGcpServiceAccountKeyArgs
metastoreId string
name string

Name of Storage Credentials, which must be unique within the databricks_metastore. Change forces creation of a new resource.

owner string

Username/groupname/sp application_id of the storage credential owner.

aws_iam_role StorageCredentialAwsIamRoleArgs

Optional configuration block for credential details for AWS.

azure_managed_identity StorageCredentialAzureManagedIdentityArgs

Optional configuration block for using a managed identity as credential details for Azure (recommended over a service principal).

azure_service_principal StorageCredentialAzureServicePrincipalArgs

Optional configuration block for using a service principal as credential details for Azure.

comment str
databricks_gcp_service_account StorageCredentialDatabricksGcpServiceAccountArgs

Optional configuration block for creating a Databricks-managed GCP Service Account.

gcp_service_account_key StorageCredentialGcpServiceAccountKeyArgs
metastore_id str
name str

Name of Storage Credentials, which must be unique within the databricks_metastore. Change forces creation of a new resource.

owner str

Username/groupname/sp application_id of the storage credential owner.

awsIamRole Property Map

Optional configuration block for credential details for AWS.

azureManagedIdentity Property Map

Optional configuration block for using a managed identity as credential details for Azure (recommended over a service principal).

azureServicePrincipal Property Map

Optional configuration block for using a service principal as credential details for Azure.

comment String
databricksGcpServiceAccount Property Map

Optional configuration block for creating a Databricks-managed GCP Service Account.

gcpServiceAccountKey Property Map
metastoreId String
name String

Name of Storage Credentials, which must be unique within the databricks_metastore. Change forces creation of a new resource.

owner String

Username/groupname/sp application_id of the storage credential owner.

Outputs

All input properties are implicitly available as output properties. Additionally, the StorageCredential resource produces the following output properties:

Id string

The provider-assigned unique ID for this managed resource.

Id string

The provider-assigned unique ID for this managed resource.

id String

The provider-assigned unique ID for this managed resource.

id string

The provider-assigned unique ID for this managed resource.

id str

The provider-assigned unique ID for this managed resource.

id String

The provider-assigned unique ID for this managed resource.
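
For instance, the id output can be exported as a stack output (a TypeScript sketch; the IAM role ARN is a placeholder):

import * as databricks from "@pulumi/databricks";

const external = new databricks.StorageCredential("external", {
    awsIamRole: {
        roleArn: "arn:aws:iam::1234567890:role/MyRole-AJJHDSKSDF", // placeholder ARN
    },
});

// The provider-assigned unique ID, available once the resource has been created.
export const storageCredentialId = external.id;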

Look up Existing StorageCredential Resource

Get an existing StorageCredential resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

public static get(name: string, id: Input<ID>, state?: StorageCredentialState, opts?: CustomResourceOptions): StorageCredential
@staticmethod
def get(resource_name: str,
        id: str,
        opts: Optional[ResourceOptions] = None,
        aws_iam_role: Optional[StorageCredentialAwsIamRoleArgs] = None,
        azure_managed_identity: Optional[StorageCredentialAzureManagedIdentityArgs] = None,
        azure_service_principal: Optional[StorageCredentialAzureServicePrincipalArgs] = None,
        comment: Optional[str] = None,
        databricks_gcp_service_account: Optional[StorageCredentialDatabricksGcpServiceAccountArgs] = None,
        gcp_service_account_key: Optional[StorageCredentialGcpServiceAccountKeyArgs] = None,
        metastore_id: Optional[str] = None,
        name: Optional[str] = None,
        owner: Optional[str] = None) -> StorageCredential
func GetStorageCredential(ctx *Context, name string, id IDInput, state *StorageCredentialState, opts ...ResourceOption) (*StorageCredential, error)
public static StorageCredential Get(string name, Input<string> id, StorageCredentialState? state, CustomResourceOptions? opts = null)
public static StorageCredential get(String name, Output<String> id, StorageCredentialState state, CustomResourceOptions options)
Resource lookup is not supported in YAML
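
For instance, in TypeScript an existing credential can be adopted into a program by its ID (a sketch; the ID value "external" is illustrative):

import * as databricks from "@pulumi/databricks";

// Look up the state of an existing storage credential by its ID.
const existing = databricks.StorageCredential.get("existing", "external");

// All resource properties are then available as outputs.
export const credentialOwner = existing.owner;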
name
The unique name of the resulting resource.
id
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
resource_name
The unique name of the resulting resource.
id
The unique provider ID of the resource to lookup.
name
The unique name of the resulting resource.
id
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
name
The unique name of the resulting resource.
id
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
name
The unique name of the resulting resource.
id
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
The following state arguments are supported:
AwsIamRole StorageCredentialAwsIamRoleArgs

Optional configuration block for credential details for AWS.

AzureManagedIdentity StorageCredentialAzureManagedIdentityArgs

Optional configuration block for using a managed identity as credential details for Azure (recommended over a service principal).

AzureServicePrincipal StorageCredentialAzureServicePrincipalArgs

Optional configuration block for using a service principal as credential details for Azure.

Comment string
DatabricksGcpServiceAccount StorageCredentialDatabricksGcpServiceAccountArgs

Optional configuration block for creating a Databricks-managed GCP Service Account.

GcpServiceAccountKey StorageCredentialGcpServiceAccountKeyArgs
MetastoreId string
Name string

Name of Storage Credentials, which must be unique within the databricks_metastore. Change forces creation of a new resource.

Owner string

Username/groupname/sp application_id of the storage credential owner.

AwsIamRole StorageCredentialAwsIamRoleArgs

Optional configuration block for credential details for AWS.

AzureManagedIdentity StorageCredentialAzureManagedIdentityArgs

Optional configuration block for using a managed identity as credential details for Azure (recommended over a service principal).

AzureServicePrincipal StorageCredentialAzureServicePrincipalArgs

Optional configuration block for using a service principal as credential details for Azure.

Comment string
DatabricksGcpServiceAccount StorageCredentialDatabricksGcpServiceAccountArgs

Optional configuration block for creating a Databricks-managed GCP Service Account.

GcpServiceAccountKey StorageCredentialGcpServiceAccountKeyArgs
MetastoreId string
Name string

Name of Storage Credentials, which must be unique within the databricks_metastore. Change forces creation of a new resource.

Owner string

Username/groupname/sp application_id of the storage credential owner.

awsIamRole StorageCredentialAwsIamRoleArgs

Optional configuration block for credential details for AWS.

azureManagedIdentity StorageCredentialAzureManagedIdentityArgs

Optional configuration block for using a managed identity as credential details for Azure (recommended over a service principal).

azureServicePrincipal StorageCredentialAzureServicePrincipalArgs

Optional configuration block for using a service principal as credential details for Azure.

comment String
databricksGcpServiceAccount StorageCredentialDatabricksGcpServiceAccountArgs

Optional configuration block for creating a Databricks-managed GCP Service Account.

gcpServiceAccountKey StorageCredentialGcpServiceAccountKeyArgs
metastoreId String
name String

Name of Storage Credentials, which must be unique within the databricks_metastore. Change forces creation of a new resource.

owner String

Username/groupname/sp application_id of the storage credential owner.

awsIamRole StorageCredentialAwsIamRoleArgs

Optional configuration block for credential details for AWS.

azureManagedIdentity StorageCredentialAzureManagedIdentityArgs

Optional configuration block for using a managed identity as credential details for Azure (recommended over a service principal).

azureServicePrincipal StorageCredentialAzureServicePrincipalArgs

Optional configuration block for using a service principal as credential details for Azure.

comment string
databricksGcpServiceAccount StorageCredentialDatabricksGcpServiceAccountArgs

Optional configuration block for creating a Databricks-managed GCP Service Account.

gcpServiceAccountKey StorageCredentialGcpServiceAccountKeyArgs
metastoreId string
name string

Name of Storage Credentials, which must be unique within the databricks_metastore. Change forces creation of a new resource.

owner string

Username/groupname/sp application_id of the storage credential owner.

aws_iam_role StorageCredentialAwsIamRoleArgs

Optional configuration block for credential details for AWS.

azure_managed_identity StorageCredentialAzureManagedIdentityArgs

Optional configuration block for using a managed identity as credential details for Azure (recommended over a service principal).

azure_service_principal StorageCredentialAzureServicePrincipalArgs

Optional configuration block for using a service principal as credential details for Azure.

comment str
databricks_gcp_service_account StorageCredentialDatabricksGcpServiceAccountArgs

Optional configuration block for creating a Databricks-managed GCP Service Account.

gcp_service_account_key StorageCredentialGcpServiceAccountKeyArgs
metastore_id str
name str

Name of Storage Credentials, which must be unique within the databricks_metastore. Change forces creation of a new resource.

owner str

Username/groupname/sp application_id of the storage credential owner.

awsIamRole Property Map

Optional configuration block for credential details for AWS.

azureManagedIdentity Property Map

Optional configuration block for using a managed identity as credential details for Azure (recommended over a service principal).

azureServicePrincipal Property Map

Optional configuration block for using a service principal as credential details for Azure.

comment String
databricksGcpServiceAccount Property Map

Optional configuration block for creating a Databricks-managed GCP Service Account.

gcpServiceAccountKey Property Map
metastoreId String
name String

Name of Storage Credentials, which must be unique within the databricks_metastore. Change forces creation of a new resource.

owner String

Username/groupname/sp application_id of the storage credential owner.

Supporting Types

StorageCredentialAwsIamRole

RoleArn string

The Amazon Resource Name (ARN) of the AWS IAM role for S3 data access, of the form arn:aws:iam::1234567890:role/MyRole-AJJHDSKSDF

RoleArn string

The Amazon Resource Name (ARN) of the AWS IAM role for S3 data access, of the form arn:aws:iam::1234567890:role/MyRole-AJJHDSKSDF

roleArn String

The Amazon Resource Name (ARN) of the AWS IAM role for S3 data access, of the form arn:aws:iam::1234567890:role/MyRole-AJJHDSKSDF

roleArn string

The Amazon Resource Name (ARN) of the AWS IAM role for S3 data access, of the form arn:aws:iam::1234567890:role/MyRole-AJJHDSKSDF

role_arn str

The Amazon Resource Name (ARN) of the AWS IAM role for S3 data access, of the form arn:aws:iam::1234567890:role/MyRole-AJJHDSKSDF

roleArn String

The Amazon Resource Name (ARN) of the AWS IAM role for S3 data access, of the form arn:aws:iam::1234567890:role/MyRole-AJJHDSKSDF

StorageCredentialAzureManagedIdentity

AccessConnectorId string

The Resource ID of the Azure Databricks Access Connector resource, of the form /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg-name/providers/Microsoft.Databricks/accessConnectors/connector-name

AccessConnectorId string

The Resource ID of the Azure Databricks Access Connector resource, of the form /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg-name/providers/Microsoft.Databricks/accessConnectors/connector-name

accessConnectorId String

The Resource ID of the Azure Databricks Access Connector resource, of the form /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg-name/providers/Microsoft.Databricks/accessConnectors/connector-name

accessConnectorId string

The Resource ID of the Azure Databricks Access Connector resource, of the form /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg-name/providers/Microsoft.Databricks/accessConnectors/connector-name

access_connector_id str

The Resource ID of the Azure Databricks Access Connector resource, of the form /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg-name/providers/Microsoft.Databricks/accessConnectors/connector-name

accessConnectorId String

The Resource ID of the Azure Databricks Access Connector resource, of the form /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg-name/providers/Microsoft.Databricks/accessConnectors/connector-name

StorageCredentialAzureServicePrincipal

ApplicationId string

The application ID of the application registration within the referenced AAD tenant

ClientSecret string

The client secret generated for the above app ID in AAD. This field is redacted on output

DirectoryId string

The directory ID corresponding to the Azure Active Directory (AAD) tenant of the application

ApplicationId string

The application ID of the application registration within the referenced AAD tenant

ClientSecret string

The client secret generated for the above app ID in AAD. This field is redacted on output

DirectoryId string

The directory ID corresponding to the Azure Active Directory (AAD) tenant of the application

applicationId String

The application ID of the application registration within the referenced AAD tenant

clientSecret String

The client secret generated for the above app ID in AAD. This field is redacted on output

directoryId String

The directory ID corresponding to the Azure Active Directory (AAD) tenant of the application

applicationId string

The application ID of the application registration within the referenced AAD tenant

clientSecret string

The client secret generated for the above app ID in AAD. This field is redacted on output

directoryId string

The directory ID corresponding to the Azure Active Directory (AAD) tenant of the application

application_id str

The application ID of the application registration within the referenced AAD tenant

client_secret str

The client secret generated for the above app ID in AAD. This field is redacted on output

directory_id str

The directory ID corresponding to the Azure Active Directory (AAD) tenant of the application

applicationId String

The application ID of the application registration within the referenced AAD tenant

clientSecret String

The client secret generated for the above app ID in AAD. This field is redacted on output

directoryId String

The directory ID corresponding to the Azure Active Directory (AAD) tenant of the application
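
The usage examples above cover managed identities; a credential can instead be backed by a service principal using the fields documented here. A minimal TypeScript sketch (the config keys tenantId, applicationId, and clientSecret are illustrative, and the secret is read from Pulumi config rather than hard-coded):

import * as pulumi from "@pulumi/pulumi";
import * as databricks from "@pulumi/databricks";

const config = new pulumi.Config();

const externalSp = new databricks.StorageCredential("externalSp", {
    azureServicePrincipal: {
        directoryId: config.require("tenantId"),            // AAD tenant (directory) ID
        applicationId: config.require("applicationId"),     // app registration ID
        clientSecret: config.requireSecret("clientSecret"), // redacted on output
    },
    comment: "Service principal credential",
});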

StorageCredentialDatabricksGcpServiceAccount

Email string

The email of the GCP service account created, to be granted access to relevant buckets.

Email string

The email of the GCP service account created, to be granted access to relevant buckets.

email String

The email of the GCP service account created, to be granted access to relevant buckets.

email string

The email of the GCP service account created, to be granted access to relevant buckets.

email str

The email of the GCP service account created, to be granted access to relevant buckets.

email String

The email of the GCP service account created, to be granted access to relevant buckets.

StorageCredentialGcpServiceAccountKey

Email string

The email of the GCP service account created, to be granted access to relevant buckets.

PrivateKey string
PrivateKeyId string
Email string

The email of the GCP service account created, to be granted access to relevant buckets.

PrivateKey string
PrivateKeyId string
email String

The email of the GCP service account created, to be granted access to relevant buckets.

privateKey String
privateKeyId String
email string

The email of the GCP service account created, to be granted access to relevant buckets.

privateKey string
privateKeyId string
email str

The email of the GCP service account created, to be granted access to relevant buckets.

private_key str
private_key_id str
email String

The email of the GCP service account created, to be granted access to relevant buckets.

privateKey String
privateKeyId String
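
As with the service principal block, a credential can be configured with an existing GCP service account key using these fields. A minimal TypeScript sketch (the service account email and config keys are illustrative):

import * as pulumi from "@pulumi/pulumi";
import * as databricks from "@pulumi/databricks";

const config = new pulumi.Config();

const externalGcpKey = new databricks.StorageCredential("externalGcpKey", {
    gcpServiceAccountKey: {
        email: "sa-name@project-id.iam.gserviceaccount.com", // illustrative account email
        privateKeyId: config.require("gcpPrivateKeyId"),
        privateKey: config.requireSecret("gcpPrivateKey"),
    },
});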

Import

This resource can be imported by name:

 $ pulumi import databricks:index/storageCredential:StorageCredential this <name>
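
For example, assuming a credential named external (as in the examples above), imported under the resource name this:

 $ pulumi import databricks:index/storageCredential:StorageCredential this external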

Package Details

Repository
databricks pulumi/pulumi-databricks
License
Apache-2.0
Notes

This Pulumi package is based on the databricks Terraform Provider.