databricks.StorageCredential
To work with external tables, Unity Catalog introduces two new objects to access and work with external cloud storage:
- databricks.StorageCredential represents authentication methods to access cloud storage (e.g. an IAM role for Amazon S3, or a service principal/managed identity for Azure Storage). Storage credentials are access-controlled to determine which users can use the credential.
- databricks.ExternalLocation combines a cloud storage path with a storage credential that can be used to access the location (see the sketch after the AWS usage example below).
Example Usage
For AWS
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Databricks = Pulumi.Databricks;
return await Deployment.RunAsync(() =>
{
var external = new Databricks.StorageCredential("external", new()
{
AwsIamRole = new Databricks.Inputs.StorageCredentialAwsIamRoleArgs
{
RoleArn = aws_iam_role.External_data_access.Arn,
},
Comment = "Managed by TF",
});
var externalCreds = new Databricks.Grants("externalCreds", new()
{
StorageCredential = external.Id,
GrantDetails = new[]
{
new Databricks.Inputs.GrantsGrantArgs
{
Principal = "Data Engineers",
Privileges = new[]
{
"CREATE_TABLE",
},
},
},
});
});
package main
import (
"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
external, err := databricks.NewStorageCredential(ctx, "external", &databricks.StorageCredentialArgs{
AwsIamRole: &databricks.StorageCredentialAwsIamRoleArgs{
RoleArn: pulumi.Any(aws_iam_role.External_data_access.Arn),
},
Comment: pulumi.String("Managed by TF"),
})
if err != nil {
return err
}
_, err = databricks.NewGrants(ctx, "externalCreds", &databricks.GrantsArgs{
StorageCredential: external.ID(),
Grants: databricks.GrantsGrantArray{
&databricks.GrantsGrantArgs{
Principal: pulumi.String("Data Engineers"),
Privileges: pulumi.StringArray{
pulumi.String("CREATE_TABLE"),
},
},
},
})
if err != nil {
return err
}
return nil
})
}
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.databricks.StorageCredential;
import com.pulumi.databricks.StorageCredentialArgs;
import com.pulumi.databricks.inputs.StorageCredentialAwsIamRoleArgs;
import com.pulumi.databricks.Grants;
import com.pulumi.databricks.GrantsArgs;
import com.pulumi.databricks.inputs.GrantsGrantArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var external = new StorageCredential("external", StorageCredentialArgs.builder()
.awsIamRole(StorageCredentialAwsIamRoleArgs.builder()
.roleArn(aws_iam_role.external_data_access().arn())
.build())
.comment("Managed by TF")
.build());
var externalCreds = new Grants("externalCreds", GrantsArgs.builder()
.storageCredential(external.id())
.grants(GrantsGrantArgs.builder()
.principal("Data Engineers")
.privileges("CREATE_TABLE")
.build())
.build());
}
}
import pulumi
import pulumi_databricks as databricks
external = databricks.StorageCredential("external",
aws_iam_role=databricks.StorageCredentialAwsIamRoleArgs(
role_arn=aws_iam_role["external_data_access"]["arn"],
),
comment="Managed by TF")
external_creds = databricks.Grants("externalCreds",
storage_credential=external.id,
grants=[databricks.GrantsGrantArgs(
principal="Data Engineers",
privileges=["CREATE_TABLE"],
)])
import * as pulumi from "@pulumi/pulumi";
import * as databricks from "@pulumi/databricks";
const external = new databricks.StorageCredential("external", {
awsIamRole: {
roleArn: aws_iam_role.external_data_access.arn,
},
comment: "Managed by TF",
});
const externalCreds = new databricks.Grants("externalCreds", {
storageCredential: external.id,
grants: [{
principal: "Data Engineers",
privileges: ["CREATE_TABLE"],
}],
});
resources:
external:
type: databricks:StorageCredential
properties:
awsIamRole:
roleArn: ${aws_iam_role.external_data_access.arn}
comment: Managed by TF
externalCreds:
type: databricks:Grants
properties:
storageCredential: ${external.id}
grants:
- principal: Data Engineers
privileges:
- CREATE_TABLE
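A storage credential is typically paired with a databricks.ExternalLocation, which binds the credential to a concrete storage path. A minimal TypeScript sketch, assuming the external credential from the AWS example above; the bucket and path are hypothetical placeholders:
import * as databricks from "@pulumi/databricks";
// Binds the `external` storage credential from the example above to an S3 path.
// The bucket name and path are hypothetical placeholders.
const someLocation = new databricks.ExternalLocation("some", {
    url: "s3://my-bucket/some-path",
    credentialName: external.name,
    comment: "Managed by TF",
});
Tables created against this external location then resolve the storage path through the credential.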
For Azure
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Databricks = Pulumi.Databricks;
return await Deployment.RunAsync(() =>
{
var externalMi = new Databricks.StorageCredential("externalMi", new()
{
AzureManagedIdentity = new Databricks.Inputs.StorageCredentialAzureManagedIdentityArgs
{
AccessConnectorId = azurerm_databricks_access_connector.Example.Id,
},
Comment = "Managed identity credential managed by TF",
});
var externalCreds = new Databricks.Grants("externalCreds", new()
{
StorageCredential = databricks_storage_credential.External.Id,
GrantDetails = new[]
{
new Databricks.Inputs.GrantsGrantArgs
{
Principal = "Data Engineers",
Privileges = new[]
{
"CREATE_TABLE",
},
},
},
});
});
package main
import (
"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
_, err := databricks.NewStorageCredential(ctx, "externalMi", &databricks.StorageCredentialArgs{
AzureManagedIdentity: &databricks.StorageCredentialAzureManagedIdentityArgs{
AccessConnectorId: pulumi.Any(azurerm_databricks_access_connector.Example.Id),
},
Comment: pulumi.String("Managed identity credential managed by TF"),
})
if err != nil {
return err
}
_, err = databricks.NewGrants(ctx, "externalCreds", &databricks.GrantsArgs{
StorageCredential: pulumi.Any(databricks_storage_credential.External.Id),
Grants: databricks.GrantsGrantArray{
&databricks.GrantsGrantArgs{
Principal: pulumi.String("Data Engineers"),
Privileges: pulumi.StringArray{
pulumi.String("CREATE_TABLE"),
},
},
},
})
if err != nil {
return err
}
return nil
})
}
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.databricks.StorageCredential;
import com.pulumi.databricks.StorageCredentialArgs;
import com.pulumi.databricks.inputs.StorageCredentialAzureManagedIdentityArgs;
import com.pulumi.databricks.Grants;
import com.pulumi.databricks.GrantsArgs;
import com.pulumi.databricks.inputs.GrantsGrantArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var externalMi = new StorageCredential("externalMi", StorageCredentialArgs.builder()
.azureManagedIdentity(StorageCredentialAzureManagedIdentityArgs.builder()
.accessConnectorId(azurerm_databricks_access_connector.example().id())
.build())
.comment("Managed identity credential managed by TF")
.build());
var externalCreds = new Grants("externalCreds", GrantsArgs.builder()
.storageCredential(databricks_storage_credential.external().id())
.grants(GrantsGrantArgs.builder()
.principal("Data Engineers")
.privileges("CREATE_TABLE")
.build())
.build());
}
}
import pulumi
import pulumi_databricks as databricks
external_mi = databricks.StorageCredential("externalMi",
azure_managed_identity=databricks.StorageCredentialAzureManagedIdentityArgs(
access_connector_id=azurerm_databricks_access_connector["example"]["id"],
),
comment="Managed identity credential managed by TF")
external_creds = databricks.Grants("externalCreds",
storage_credential=databricks_storage_credential["external"]["id"],
grants=[databricks.GrantsGrantArgs(
principal="Data Engineers",
privileges=["CREATE_TABLE"],
)])
import * as pulumi from "@pulumi/pulumi";
import * as databricks from "@pulumi/databricks";
const externalMi = new databricks.StorageCredential("externalMi", {
azureManagedIdentity: {
accessConnectorId: azurerm_databricks_access_connector.example.id,
},
comment: "Managed identity credential managed by TF",
});
const externalCreds = new databricks.Grants("externalCreds", {
storageCredential: databricks_storage_credential.external.id,
grants: [{
principal: "Data Engineers",
privileges: ["CREATE_TABLE"],
}],
});
resources:
externalMi:
type: databricks:StorageCredential
properties:
azureManagedIdentity:
accessConnectorId: ${azurerm_databricks_access_connector.example.id}
comment: Managed identity credential managed by TF
externalCreds:
type: databricks:Grants
properties:
storageCredential: ${databricks_storage_credential.external.id}
grants:
- principal: Data Engineers
privileges:
- CREATE_TABLE
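The accessConnectorId above points at an Azure Databricks Access Connector, which is normally provisioned alongside the credential. A hedged sketch using the Pulumi Azure provider; the resource group name and location are assumptions:
import * as azure from "@pulumi/azure";
// Hypothetical access connector with a system-assigned managed identity;
// assumes an existing resource group named "example-rg".
const exampleConnector = new azure.databricks.AccessConnector("example", {
    resourceGroupName: "example-rg",
    location: "westeurope",
    identity: {
        type: "SystemAssigned",
    },
});
The connector's id output can then be passed as accessConnectorId on the storage credential.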
For GCP
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Databricks = Pulumi.Databricks;
return await Deployment.RunAsync(() =>
{
var external = new Databricks.StorageCredential("external", new()
{
DatabricksGcpServiceAccount = new Databricks.Inputs.StorageCredentialDatabricksGcpServiceAccountArgs(),
});
var externalCreds = new Databricks.Grants("externalCreds", new()
{
StorageCredential = external.Id,
GrantDetails = new[]
{
new Databricks.Inputs.GrantsGrantArgs
{
Principal = "Data Engineers",
Privileges = new[]
{
"CREATE_TABLE",
},
},
},
});
});
package main
import (
"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
external, err := databricks.NewStorageCredential(ctx, "external", &databricks.StorageCredentialArgs{
DatabricksGcpServiceAccount: &databricks.StorageCredentialDatabricksGcpServiceAccountArgs{},
})
if err != nil {
return err
}
_, err = databricks.NewGrants(ctx, "externalCreds", &databricks.GrantsArgs{
StorageCredential: external.ID(),
Grants: databricks.GrantsGrantArray{
&databricks.GrantsGrantArgs{
Principal: pulumi.String("Data Engineers"),
Privileges: pulumi.StringArray{
pulumi.String("CREATE_TABLE"),
},
},
},
})
if err != nil {
return err
}
return nil
})
}
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.databricks.StorageCredential;
import com.pulumi.databricks.StorageCredentialArgs;
import com.pulumi.databricks.inputs.StorageCredentialDatabricksGcpServiceAccountArgs;
import com.pulumi.databricks.Grants;
import com.pulumi.databricks.GrantsArgs;
import com.pulumi.databricks.inputs.GrantsGrantArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var external = new StorageCredential("external", StorageCredentialArgs.builder()
.databricksGcpServiceAccount(StorageCredentialDatabricksGcpServiceAccountArgs.builder().build())
.build());
var externalCreds = new Grants("externalCreds", GrantsArgs.builder()
.storageCredential(external.id())
.grants(GrantsGrantArgs.builder()
.principal("Data Engineers")
.privileges("CREATE_TABLE")
.build())
.build());
}
}
import pulumi
import pulumi_databricks as databricks
external = databricks.StorageCredential("external", databricks_gcp_service_account=databricks.StorageCredentialDatabricksGcpServiceAccountArgs())
external_creds = databricks.Grants("externalCreds",
storage_credential=external.id,
grants=[databricks.GrantsGrantArgs(
principal="Data Engineers",
privileges=["CREATE_TABLE"],
)])
import * as pulumi from "@pulumi/pulumi";
import * as databricks from "@pulumi/databricks";
const external = new databricks.StorageCredential("external", {databricksGcpServiceAccount: {}});
const externalCreds = new databricks.Grants("externalCreds", {
storageCredential: external.id,
grants: [{
principal: "Data Engineers",
privileges: ["CREATE_TABLE"],
}],
});
resources:
external:
type: databricks:StorageCredential
properties:
databricksGcpServiceAccount: {}
externalCreds:
type: databricks:Grants
properties:
storageCredential: ${external.id}
grants:
- principal: Data Engineers
privileges:
- CREATE_TABLE
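With databricksGcpServiceAccount, Databricks creates the service account and exposes its email as an output, but that account still has to be granted access to the relevant buckets. A sketch under assumptions, using the Pulumi GCP provider; the bucket name and role choice are hypothetical:
import * as gcp from "@pulumi/gcp";
// Grants the Databricks-managed service account object access on a bucket.
// `external` is the storage credential from the GCP example above;
// "my-bucket" and the role are hypothetical placeholders.
const grant = new gcp.storage.BucketIAMMember("unity-catalog-access", {
    bucket: "my-bucket",
    role: "roles/storage.objectAdmin",
    member: external.databricksGcpServiceAccount.apply(sa => `serviceAccount:${sa?.email}`),
});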
Create StorageCredential Resource
new StorageCredential(name: string, args?: StorageCredentialArgs, opts?: CustomResourceOptions);
@overload
def StorageCredential(resource_name: str,
opts: Optional[ResourceOptions] = None,
aws_iam_role: Optional[StorageCredentialAwsIamRoleArgs] = None,
azure_managed_identity: Optional[StorageCredentialAzureManagedIdentityArgs] = None,
azure_service_principal: Optional[StorageCredentialAzureServicePrincipalArgs] = None,
comment: Optional[str] = None,
databricks_gcp_service_account: Optional[StorageCredentialDatabricksGcpServiceAccountArgs] = None,
gcp_service_account_key: Optional[StorageCredentialGcpServiceAccountKeyArgs] = None,
metastore_id: Optional[str] = None,
name: Optional[str] = None,
owner: Optional[str] = None)
@overload
def StorageCredential(resource_name: str,
args: Optional[StorageCredentialArgs] = None,
opts: Optional[ResourceOptions] = None)
func NewStorageCredential(ctx *Context, name string, args *StorageCredentialArgs, opts ...ResourceOption) (*StorageCredential, error)
public StorageCredential(string name, StorageCredentialArgs? args = null, CustomResourceOptions? opts = null)
public StorageCredential(String name, StorageCredentialArgs args)
public StorageCredential(String name, StorageCredentialArgs args, CustomResourceOptions options)
type: databricks:StorageCredential
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args StorageCredentialArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
StorageCredential Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
The StorageCredential resource accepts the following input properties:
- AwsIamRole StorageCredentialAwsIamRoleArgs
Optional configuration block for credential details for AWS.
- AzureManagedIdentity StorageCredentialAzureManagedIdentityArgs
Optional configuration block for using a managed identity as credential details for Azure (recommended over a service principal).
- AzureServicePrincipal StorageCredentialAzureServicePrincipalArgs
Optional configuration block to use a service principal as credential details for Azure.
- Comment string
- DatabricksGcpServiceAccount StorageCredentialDatabricksGcpServiceAccountArgs
Optional configuration block for creating a Databricks-managed GCP service account.
- GcpServiceAccountKey StorageCredentialGcpServiceAccountKeyArgs
- MetastoreId string
- Name string
Name of the storage credential, which must be unique within the databricks_metastore. Change forces creation of a new resource.
- Owner string
Username/groupname/sp application_id of the storage credential owner.
Outputs
All input properties are implicitly available as output properties. Additionally, the StorageCredential resource produces the following output properties:
- Id string
The provider-assigned unique ID for this managed resource.
Look up Existing StorageCredential Resource
Get an existing StorageCredential resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: StorageCredentialState, opts?: CustomResourceOptions): StorageCredential
@staticmethod
def get(resource_name: str,
id: str,
opts: Optional[ResourceOptions] = None,
aws_iam_role: Optional[StorageCredentialAwsIamRoleArgs] = None,
azure_managed_identity: Optional[StorageCredentialAzureManagedIdentityArgs] = None,
azure_service_principal: Optional[StorageCredentialAzureServicePrincipalArgs] = None,
comment: Optional[str] = None,
databricks_gcp_service_account: Optional[StorageCredentialDatabricksGcpServiceAccountArgs] = None,
gcp_service_account_key: Optional[StorageCredentialGcpServiceAccountKeyArgs] = None,
metastore_id: Optional[str] = None,
name: Optional[str] = None,
owner: Optional[str] = None) -> StorageCredential
func GetStorageCredential(ctx *Context, name string, id IDInput, state *StorageCredentialState, opts ...ResourceOption) (*StorageCredential, error)
public static StorageCredential Get(string name, Input<string> id, StorageCredentialState? state, CustomResourceOptions? opts = null)
public static StorageCredential get(String name, Output<String> id, StorageCredentialState state, CustomResourceOptions options)
Resource lookup is not supported in YAML
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to look up.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
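For example, a minimal TypeScript lookup, assuming the credential's provider ID is already known (the ID below is a placeholder):
import * as databricks from "@pulumi/databricks";
// Looks up an existing storage credential by its provider ID (placeholder value).
const existing = databricks.StorageCredential.get("existing", "external-credential-id");
export const owner = existing.owner;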
The following state arguments are supported (they mirror the resource inputs):
- AwsIamRole StorageCredentialAwsIamRoleArgs
Optional configuration block for credential details for AWS.
- AzureManagedIdentity StorageCredentialAzureManagedIdentityArgs
Optional configuration block for using a managed identity as credential details for Azure (recommended over a service principal).
- AzureServicePrincipal StorageCredentialAzureServicePrincipalArgs
Optional configuration block to use a service principal as credential details for Azure.
- Comment string
- DatabricksGcpServiceAccount StorageCredentialDatabricksGcpServiceAccountArgs
Optional configuration block for creating a Databricks-managed GCP service account.
- GcpServiceAccountKey StorageCredentialGcpServiceAccountKeyArgs
- MetastoreId string
- Name string
Name of the storage credential, which must be unique within the databricks_metastore. Change forces creation of a new resource.
- Owner string
Username/groupname/sp application_id of the storage credential owner.
Supporting Types
StorageCredentialAwsIamRole
- RoleArn string
The Amazon Resource Name (ARN) of the AWS IAM role for S3 data access, of the form arn:aws:iam::1234567890:role/MyRole-AJJHDSKSDF.
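The role itself is usually defined with the Pulumi AWS provider and its ARN wired into awsIamRole. A sketch under assumptions: the trust policy below is only a placeholder, and the real policy must allow the Databricks-managed principal to assume the role (see the Databricks documentation for the exact principal and external ID):
import * as aws from "@pulumi/aws";
import * as databricks from "@pulumi/databricks";
// Hypothetical IAM role; the assume-role policy is a placeholder and must be
// replaced with the trust relationship Unity Catalog requires.
const externalDataAccess = new aws.iam.Role("external-data-access", {
    assumeRolePolicy: JSON.stringify({
        Version: "2012-10-17",
        Statement: [{
            Effect: "Allow",
            Principal: { AWS: "arn:aws:iam::123456789012:root" }, // placeholder principal
            Action: "sts:AssumeRole",
        }],
    }),
});
const credential = new databricks.StorageCredential("external-example", {
    awsIamRole: { roleArn: externalDataAccess.arn },
});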
StorageCredentialAzureManagedIdentity
- AccessConnectorId string
The Resource ID of the Azure Databricks Access Connector resource, of the form /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg-name/providers/Microsoft.Databricks/accessConnectors/connector-name.
StorageCredentialAzureServicePrincipal
- ApplicationId string
The application ID of the application registration within the referenced AAD tenant.
- ClientSecret string
The client secret generated for the above app ID in AAD. This field is redacted on output.
- DirectoryId string
The directory ID corresponding to the Azure Active Directory (AAD) tenant of the application.
StorageCredentialDatabricksGcpServiceAccount
- Email string
The email of the GCP service account created, to be granted access to relevant buckets.
StorageCredentialGcpServiceAccountKey
- Email string
The email of the GCP service account created, to be granted access to relevant buckets.
- PrivateKey string
- PrivateKeyId string
Import
This resource can be imported by name:
$ pulumi import databricks:index/storageCredential:StorageCredential this <name>
Package Details
- Repository
- databricks pulumi/pulumi-databricks
- License
- Apache-2.0
- Notes
This Pulumi package is based on the
databricks
Terraform Provider.