databricks.StorageCredential

Databricks v1.34.0 published on Tuesday, Mar 5, 2024 by Pulumi

    Note: This resource can be used with an account-level or workspace-level provider.

    To work with external tables, Unity Catalog introduces two objects for accessing and working with external cloud storage:

    • databricks.StorageCredential represents authentication methods used to access cloud storage (e.g., an IAM role for Amazon S3, or a service principal or managed identity for Azure Storage). Storage credentials are access-controlled, determining which users may use the credential.
    • databricks.ExternalLocation represents an object that combines a cloud storage path with a storage credential authorized to access that path (see the sketch below).
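
    A minimal TypeScript sketch of how the two objects combine, assuming the external storage credential defined in the AWS example below and a hypothetical s3://my-bucket/some path:

    import * as databricks from "@pulumi/databricks";

    // Pair the storage credential with a cloud storage path so Unity Catalog
    // can use that path as an external location.
    const someLocation = new databricks.ExternalLocation("some", {
        name: "external-location",       // assumed location name
        url: "s3://my-bucket/some",      // assumed bucket path
        credentialName: external.id,     // the credential from the AWS example
        comment: "Managed by TF",
    });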

    Example Usage

    For AWS

    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Databricks = Pulumi.Databricks;
    
    return await Deployment.RunAsync(() => 
    {
        var external = new Databricks.StorageCredential("external", new()
        {
            AwsIamRole = new Databricks.Inputs.StorageCredentialAwsIamRoleArgs
            {
                RoleArn = aws_iam_role.External_data_access.Arn,
            },
            Comment = "Managed by TF",
        });
    
        var externalCreds = new Databricks.Grants("externalCreds", new()
        {
            StorageCredential = external.Id,
            GrantDetails = new[]
            {
                new Databricks.Inputs.GrantsGrantArgs
                {
                    Principal = "Data Engineers",
                    Privileges = new[]
                    {
                        "CREATE_EXTERNAL_TABLE",
                    },
                },
            },
        });
    
    });
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		external, err := databricks.NewStorageCredential(ctx, "external", &databricks.StorageCredentialArgs{
    			AwsIamRole: &databricks.StorageCredentialAwsIamRoleArgs{
    				RoleArn: pulumi.Any(aws_iam_role.External_data_access.Arn),
    			},
    			Comment: pulumi.String("Managed by TF"),
    		})
    		if err != nil {
    			return err
    		}
    		_, err = databricks.NewGrants(ctx, "externalCreds", &databricks.GrantsArgs{
    			StorageCredential: external.ID(),
    			Grants: databricks.GrantsGrantArray{
    				&databricks.GrantsGrantArgs{
    					Principal: pulumi.String("Data Engineers"),
    					Privileges: pulumi.StringArray{
    						pulumi.String("CREATE_EXTERNAL_TABLE"),
    					},
    				},
    			},
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.databricks.StorageCredential;
    import com.pulumi.databricks.StorageCredentialArgs;
    import com.pulumi.databricks.inputs.StorageCredentialAwsIamRoleArgs;
    import com.pulumi.databricks.Grants;
    import com.pulumi.databricks.GrantsArgs;
    import com.pulumi.databricks.inputs.GrantsGrantArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var external = new StorageCredential("external", StorageCredentialArgs.builder()        
                .awsIamRole(StorageCredentialAwsIamRoleArgs.builder()
                    .roleArn(aws_iam_role.external_data_access().arn())
                    .build())
                .comment("Managed by TF")
                .build());
    
            var externalCreds = new Grants("externalCreds", GrantsArgs.builder()        
                .storageCredential(external.id())
                .grants(GrantsGrantArgs.builder()
                    .principal("Data Engineers")
                    .privileges("CREATE_EXTERNAL_TABLE")
                    .build())
                .build());
    
        }
    }
    
    import pulumi
    import pulumi_databricks as databricks
    
    external = databricks.StorageCredential("external",
        aws_iam_role=databricks.StorageCredentialAwsIamRoleArgs(
            role_arn=aws_iam_role["external_data_access"]["arn"],
        ),
        comment="Managed by TF")
    external_creds = databricks.Grants("externalCreds",
        storage_credential=external.id,
        grants=[databricks.GrantsGrantArgs(
            principal="Data Engineers",
            privileges=["CREATE_EXTERNAL_TABLE"],
        )])
    
    import * as pulumi from "@pulumi/pulumi";
    import * as databricks from "@pulumi/databricks";
    
    const external = new databricks.StorageCredential("external", {
        awsIamRole: {
            roleArn: aws_iam_role.external_data_access.arn,
        },
        comment: "Managed by TF",
    });
    const externalCreds = new databricks.Grants("externalCreds", {
        storageCredential: external.id,
        grants: [{
            principal: "Data Engineers",
            privileges: ["CREATE_EXTERNAL_TABLE"],
        }],
    });
    
    resources:
      external:
        type: databricks:StorageCredential
        properties:
          awsIamRole:
            roleArn: ${aws_iam_role.external_data_access.arn}
          comment: Managed by TF
      externalCreds:
        type: databricks:Grants
        properties:
          storageCredential: ${external.id}
          grants:
            - principal: Data Engineers
              privileges:
                - CREATE_EXTERNAL_TABLE
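
    The aws_iam_role.External_data_access references above are carried over from the upstream Terraform examples and assume an IAM role defined elsewhere. In a Pulumi program you would reference the role resource directly; here is a hedged TypeScript sketch, where the role name and trust policy are assumptions (consult the Databricks Unity Catalog setup guide for the exact trust relationship):

    import * as aws from "@pulumi/aws";
    import * as databricks from "@pulumi/databricks";

    // Assumption: a role that the Databricks Unity Catalog principal may assume.
    const externalDataAccess = new aws.iam.Role("external-data-access", {
        assumeRolePolicy: JSON.stringify({
            Version: "2012-10-17",
            Statement: [{
                Effect: "Allow",
                Action: "sts:AssumeRole",
                // Assumption: the Databricks AWS account; verify against the
                // current Unity Catalog documentation.
                Principal: { AWS: "arn:aws:iam::414351767826:root" },
            }],
        }),
    });

    const external = new databricks.StorageCredential("external", {
        awsIamRole: { roleArn: externalDataAccess.arn },
        comment: "Managed by TF",
    });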
    

    For Azure

    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Databricks = Pulumi.Databricks;
    
    return await Deployment.RunAsync(() => 
    {
        var externalMi = new Databricks.StorageCredential("externalMi", new()
        {
            AzureManagedIdentity = new Databricks.Inputs.StorageCredentialAzureManagedIdentityArgs
            {
                AccessConnectorId = azurerm_databricks_access_connector.Example.Id,
            },
            Comment = "Managed identity credential managed by TF",
        });
    
        var externalCreds = new Databricks.Grants("externalCreds", new()
        {
            StorageCredential = databricks_storage_credential.External.Id,
            GrantDetails = new[]
            {
                new Databricks.Inputs.GrantsGrantArgs
                {
                    Principal = "Data Engineers",
                    Privileges = new[]
                    {
                        "CREATE_EXTERNAL_TABLE",
                    },
                },
            },
        });
    
    });
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		_, err := databricks.NewStorageCredential(ctx, "externalMi", &databricks.StorageCredentialArgs{
    			AzureManagedIdentity: &databricks.StorageCredentialAzureManagedIdentityArgs{
    				AccessConnectorId: pulumi.Any(azurerm_databricks_access_connector.Example.Id),
    			},
    			Comment: pulumi.String("Managed identity credential managed by TF"),
    		})
    		if err != nil {
    			return err
    		}
    		_, err = databricks.NewGrants(ctx, "externalCreds", &databricks.GrantsArgs{
    			StorageCredential: pulumi.Any(databricks_storage_credential.External.Id),
    			Grants: databricks.GrantsGrantArray{
    				&databricks.GrantsGrantArgs{
    					Principal: pulumi.String("Data Engineers"),
    					Privileges: pulumi.StringArray{
    						pulumi.String("CREATE_EXTERNAL_TABLE"),
    					},
    				},
    			},
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.databricks.StorageCredential;
    import com.pulumi.databricks.StorageCredentialArgs;
    import com.pulumi.databricks.inputs.StorageCredentialAzureManagedIdentityArgs;
    import com.pulumi.databricks.Grants;
    import com.pulumi.databricks.GrantsArgs;
    import com.pulumi.databricks.inputs.GrantsGrantArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var externalMi = new StorageCredential("externalMi", StorageCredentialArgs.builder()        
                .azureManagedIdentity(StorageCredentialAzureManagedIdentityArgs.builder()
                    .accessConnectorId(azurerm_databricks_access_connector.example().id())
                    .build())
                .comment("Managed identity credential managed by TF")
                .build());
    
            var externalCreds = new Grants("externalCreds", GrantsArgs.builder()        
                .storageCredential(databricks_storage_credential.external().id())
                .grants(GrantsGrantArgs.builder()
                    .principal("Data Engineers")
                    .privileges("CREATE_EXTERNAL_TABLE")
                    .build())
                .build());
    
        }
    }
    
    import pulumi
    import pulumi_databricks as databricks
    
    external_mi = databricks.StorageCredential("externalMi",
        azure_managed_identity=databricks.StorageCredentialAzureManagedIdentityArgs(
            access_connector_id=azurerm_databricks_access_connector["example"]["id"],
        ),
        comment="Managed identity credential managed by TF")
    external_creds = databricks.Grants("externalCreds",
        storage_credential=databricks_storage_credential["external"]["id"],
        grants=[databricks.GrantsGrantArgs(
            principal="Data Engineers",
            privileges=["CREATE_EXTERNAL_TABLE"],
        )])
    
    import * as pulumi from "@pulumi/pulumi";
    import * as databricks from "@pulumi/databricks";
    
    const externalMi = new databricks.StorageCredential("externalMi", {
        azureManagedIdentity: {
            accessConnectorId: azurerm_databricks_access_connector.example.id,
        },
        comment: "Managed identity credential managed by TF",
    });
    const externalCreds = new databricks.Grants("externalCreds", {
        storageCredential: databricks_storage_credential.external.id,
        grants: [{
            principal: "Data Engineers",
            privileges: ["CREATE_EXTERNAL_TABLE"],
        }],
    });
    
    resources:
      externalMi:
        type: databricks:StorageCredential
        properties:
          azureManagedIdentity:
            accessConnectorId: ${azurerm_databricks_access_connector.example.id}
          comment: Managed identity credential managed by TF
      externalCreds:
        type: databricks:Grants
        properties:
          storageCredential: ${databricks_storage_credential.external.id}
          grants:
            - principal: Data Engineers
              privileges:
                - CREATE_EXTERNAL_TABLE
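
    Likewise, azurerm_databricks_access_connector.example is a Terraform-style reference to a connector defined elsewhere. With the @pulumi/azure provider the access connector could be created in the same program; a sketch, assuming an existing resource group my-rg and the westeurope region:

    import * as azure from "@pulumi/azure";
    import * as databricks from "@pulumi/databricks";

    // Assumption: resource group and region. The connector's system-assigned
    // managed identity is what the storage credential wraps.
    const connector = new azure.databricks.AccessConnector("example", {
        resourceGroupName: "my-rg",
        location: "westeurope",
        identity: { type: "SystemAssigned" },
    });

    const externalMi = new databricks.StorageCredential("externalMi", {
        azureManagedIdentity: { accessConnectorId: connector.id },
        comment: "Managed identity credential managed by TF",
    });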
    

    For GCP

    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Databricks = Pulumi.Databricks;
    
    return await Deployment.RunAsync(() => 
    {
        var external = new Databricks.StorageCredential("external", new()
        {
            DatabricksGcpServiceAccount = new Databricks.Inputs.StorageCredentialDatabricksGcpServiceAccountArgs(),
        });
    
        var externalCreds = new Databricks.Grants("externalCreds", new()
        {
            StorageCredential = external.Id,
            GrantDetails = new[]
            {
                new Databricks.Inputs.GrantsGrantArgs
                {
                    Principal = "Data Engineers",
                    Privileges = new[]
                    {
                        "CREATE_EXTERNAL_TABLE",
                    },
                },
            },
        });
    
    });
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		external, err := databricks.NewStorageCredential(ctx, "external", &databricks.StorageCredentialArgs{
    			DatabricksGcpServiceAccount: &databricks.StorageCredentialDatabricksGcpServiceAccountArgs{},
    		})
    		if err != nil {
    			return err
    		}
    		_, err = databricks.NewGrants(ctx, "externalCreds", &databricks.GrantsArgs{
    			StorageCredential: external.ID(),
    			Grants: databricks.GrantsGrantArray{
    				&databricks.GrantsGrantArgs{
    					Principal: pulumi.String("Data Engineers"),
    					Privileges: pulumi.StringArray{
    						pulumi.String("CREATE_EXTERNAL_TABLE"),
    					},
    				},
    			},
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.databricks.StorageCredential;
    import com.pulumi.databricks.StorageCredentialArgs;
    import com.pulumi.databricks.inputs.StorageCredentialDatabricksGcpServiceAccountArgs;
    import com.pulumi.databricks.Grants;
    import com.pulumi.databricks.GrantsArgs;
    import com.pulumi.databricks.inputs.GrantsGrantArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var external = new StorageCredential("external", StorageCredentialArgs.builder()        
                .databricksGcpServiceAccount(StorageCredentialDatabricksGcpServiceAccountArgs.builder().build())
                .build());
    
            var externalCreds = new Grants("externalCreds", GrantsArgs.builder()        
                .storageCredential(external.id())
                .grants(GrantsGrantArgs.builder()
                    .principal("Data Engineers")
                    .privileges("CREATE_EXTERNAL_TABLE")
                    .build())
                .build());
    
        }
    }
    
    import pulumi
    import pulumi_databricks as databricks
    
    external = databricks.StorageCredential("external", databricks_gcp_service_account=databricks.StorageCredentialDatabricksGcpServiceAccountArgs())
    external_creds = databricks.Grants("externalCreds",
        storage_credential=external.id,
        grants=[databricks.GrantsGrantArgs(
            principal="Data Engineers",
            privileges=["CREATE_EXTERNAL_TABLE"],
        )])
    
    import * as pulumi from "@pulumi/pulumi";
    import * as databricks from "@pulumi/databricks";
    
    const external = new databricks.StorageCredential("external", {databricksGcpServiceAccount: {}});
    const externalCreds = new databricks.Grants("externalCreds", {
        storageCredential: external.id,
        grants: [{
            principal: "Data Engineers",
            privileges: ["CREATE_EXTERNAL_TABLE"],
        }],
    });
    
    resources:
      external:
        type: databricks:StorageCredential
        properties:
          databricksGcpServiceAccount: {}
      externalCreds:
        type: databricks:Grants
        properties:
          storageCredential: ${external.id}
          grants:
            - principal: Data Engineers
              privileges:
                - CREATE_EXTERNAL_TABLE
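
    With databricksGcpServiceAccount set to an empty block, Databricks creates and manages the GCP service account itself; the generated email still needs to be granted access on the target GCS buckets. A TypeScript sketch that surfaces it from the external credential above:

    // Export the generated service-account email so it can be granted access
    // on the relevant GCS buckets.
    export const credentialEmail = external.databricksGcpServiceAccount.apply(sa => sa?.email);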
    

    Create StorageCredential Resource

    new StorageCredential(name: string, args?: StorageCredentialArgs, opts?: CustomResourceOptions);
    @overload
    def StorageCredential(resource_name: str,
                          opts: Optional[ResourceOptions] = None,
                          aws_iam_role: Optional[StorageCredentialAwsIamRoleArgs] = None,
                          azure_managed_identity: Optional[StorageCredentialAzureManagedIdentityArgs] = None,
                          azure_service_principal: Optional[StorageCredentialAzureServicePrincipalArgs] = None,
                          comment: Optional[str] = None,
                          databricks_gcp_service_account: Optional[StorageCredentialDatabricksGcpServiceAccountArgs] = None,
                          force_destroy: Optional[bool] = None,
                          force_update: Optional[bool] = None,
                          gcp_service_account_key: Optional[StorageCredentialGcpServiceAccountKeyArgs] = None,
                          metastore_id: Optional[str] = None,
                          name: Optional[str] = None,
                          owner: Optional[str] = None,
                          read_only: Optional[bool] = None,
                          skip_validation: Optional[bool] = None)
    @overload
    def StorageCredential(resource_name: str,
                          args: Optional[StorageCredentialArgs] = None,
                          opts: Optional[ResourceOptions] = None)
    func NewStorageCredential(ctx *Context, name string, args *StorageCredentialArgs, opts ...ResourceOption) (*StorageCredential, error)
    public StorageCredential(string name, StorageCredentialArgs? args = null, CustomResourceOptions? opts = null)
    public StorageCredential(String name, StorageCredentialArgs args)
    public StorageCredential(String name, StorageCredentialArgs args, CustomResourceOptions options)
    
    type: databricks:StorageCredential
    properties: # The arguments to resource properties.
    options: # Bag of options to control resource's behavior.
    
    
    TypeScript:
    name string
    The unique name of the resource.
    args StorageCredentialArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control the resource's behavior.

    Python:
    resource_name str
    The unique name of the resource.
    args StorageCredentialArgs
    The arguments to resource properties.
    opts ResourceOptions
    Bag of options to control the resource's behavior.

    Go:
    ctx Context
    Context object for the current deployment.
    name string
    The unique name of the resource.
    args StorageCredentialArgs
    The arguments to resource properties.
    opts ResourceOption
    Bag of options to control the resource's behavior.

    C#:
    name string
    The unique name of the resource.
    args StorageCredentialArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control the resource's behavior.

    Java:
    name String
    The unique name of the resource.
    args StorageCredentialArgs
    The arguments to resource properties.
    options CustomResourceOptions
    Bag of options to control the resource's behavior.

    StorageCredential Resource Properties

    To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

    Inputs

    The StorageCredential resource accepts the following input properties:

    Property names are shown below in camelCase (TypeScript, Java, YAML); the C# and Go SDKs use PascalCase and the Python SDK uses snake_case.

    awsIamRole StorageCredentialAwsIamRole
    Optional configuration block for credential details for AWS.
    azureManagedIdentity StorageCredentialAzureManagedIdentity
    Optional configuration block for using a managed identity as credential details for Azure (recommended over a service principal).
    azureServicePrincipal StorageCredentialAzureServicePrincipal
    Optional configuration block for using a service principal as credential details for Azure (legacy).
    comment string
    databricksGcpServiceAccount StorageCredentialDatabricksGcpServiceAccount
    Optional configuration block for creating a Databricks-managed GCP service account.
    forceDestroy bool
    Delete the storage credential regardless of its dependencies.
    forceUpdate bool
    Update the storage credential regardless of its dependents.
    gcpServiceAccountKey StorageCredentialGcpServiceAccountKey
    metastoreId string
    Unique identifier of the parent metastore. If set for a workspace-level credential, it must match the ID of the metastore assigned to the workspace. When changing the metastore assigned to a workspace, this field becomes required.
    name string
    Name of the storage credential, which must be unique within the databricks_metastore. Changing the name forces creation of a new resource.
    owner string
    Username, group name, or service principal application_id of the storage credential owner.
    readOnly bool
    Indicates whether the storage credential is usable only for read operations.
    skipValidation bool
    Suppress validation errors, if any, and force-save the storage credential.
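
    As an illustration of the flags, skipValidation is handy when the underlying IAM role was just created and validation would race against IAM propagation. A hedged TypeScript sketch (the role ARN is a placeholder):

    import * as databricks from "@pulumi/databricks";

    const ro = new databricks.StorageCredential("ro", {
        awsIamRole: { roleArn: "arn:aws:iam::1234567890:role/MyRole" }, // placeholder ARN
        readOnly: true,       // usable for read operations only
        skipValidation: true, // save even if validation currently fails
    });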

    Outputs

    All input properties are implicitly available as output properties. Additionally, the StorageCredential resource produces the following output properties:

    id string
    The provider-assigned unique ID for this managed resource (Id in the C# and Go SDKs).

    Look up Existing StorageCredential Resource

    Get an existing StorageCredential resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

    public static get(name: string, id: Input<ID>, state?: StorageCredentialState, opts?: CustomResourceOptions): StorageCredential
    @staticmethod
    def get(resource_name: str,
            id: str,
            opts: Optional[ResourceOptions] = None,
            aws_iam_role: Optional[StorageCredentialAwsIamRoleArgs] = None,
            azure_managed_identity: Optional[StorageCredentialAzureManagedIdentityArgs] = None,
            azure_service_principal: Optional[StorageCredentialAzureServicePrincipalArgs] = None,
            comment: Optional[str] = None,
            databricks_gcp_service_account: Optional[StorageCredentialDatabricksGcpServiceAccountArgs] = None,
            force_destroy: Optional[bool] = None,
            force_update: Optional[bool] = None,
            gcp_service_account_key: Optional[StorageCredentialGcpServiceAccountKeyArgs] = None,
            metastore_id: Optional[str] = None,
            name: Optional[str] = None,
            owner: Optional[str] = None,
            read_only: Optional[bool] = None,
            skip_validation: Optional[bool] = None) -> StorageCredential
    func GetStorageCredential(ctx *Context, name string, id IDInput, state *StorageCredentialState, opts ...ResourceOption) (*StorageCredential, error)
    public static StorageCredential Get(string name, Input<string> id, StorageCredentialState? state, CustomResourceOptions? opts = null)
    public static StorageCredential get(String name, Output<String> id, StorageCredentialState state, CustomResourceOptions options)
    Resource lookup is not supported in YAML
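
    A minimal TypeScript lookup sketch, assuming an existing credential whose ID (its name) is external:

    import * as databricks from "@pulumi/databricks";

    // Rehydrate an existing credential into the program without recreating it.
    const existing = databricks.StorageCredential.get("external", "external");
    export const owner = existing.owner;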
    name
    The unique name of the resulting resource (resource_name in Python).
    id
    The unique provider ID of the resource to look up.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior (options in Java).
    The following state arguments are supported; they mirror the input properties documented above.

    Supporting Types

    StorageCredentialAwsIamRole, StorageCredentialAwsIamRoleArgs

    roleArn string
    The Amazon Resource Name (ARN) of the AWS IAM role for S3 data access, of the form arn:aws:iam::1234567890:role/MyRole-AJJHDSKSDF.
    externalId string
    The external ID used in role assumption to prevent the confused deputy problem.
    unityCatalogIamArn string
    The Amazon Resource Name (ARN) of the AWS IAM user managed by Databricks. This is the identity that will assume the AWS IAM role.

    StorageCredentialAzureManagedIdentity, StorageCredentialAzureManagedIdentityArgs

    accessConnectorId string
    The Resource ID of the Azure Databricks Access Connector resource, of the form /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg-name/providers/Microsoft.Databricks/accessConnectors/connector-name.
    credentialId string
    managedIdentityId string
    The Resource ID of the Azure user-assigned managed identity associated with the Azure Databricks Access Connector, of the form /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg-name/providers/Microsoft.ManagedIdentity/userAssignedIdentities/user-managed-identity-name.

    StorageCredentialAzureServicePrincipal, StorageCredentialAzureServicePrincipalArgs

    applicationId string
    The application ID of the application registration within the referenced AAD tenant.
    clientSecret string
    The client secret generated for the above application ID in AAD. This field is redacted on output.
    directoryId string
    The directory ID corresponding to the Azure Active Directory (AAD) tenant of the application.

    StorageCredentialDatabricksGcpServiceAccount, StorageCredentialDatabricksGcpServiceAccountArgs

    credentialId string
    email string
    The email of the GCP service account created, to be granted access to the relevant buckets.

    StorageCredentialGcpServiceAccountKey, StorageCredentialGcpServiceAccountKeyArgs

    email string
    The email of the GCP service account created, to be granted access to the relevant buckets.
    privateKey string
    privateKeyId string

    Import

    This resource can be imported by name:

    bash

    $ pulumi import databricks:index/storageCredential:StorageCredential this <name>
    

    Package Details

    Repository
    databricks pulumi/pulumi-databricks
    License
    Apache-2.0
    Notes
    This Pulumi package is based on the databricks Terraform Provider.