1. Packages
  2. Databricks
  3. API Docs
  4. ExternalLocation
Databricks v1.36.0 published on Friday, Apr 19, 2024 by Pulumi

databricks.ExternalLocation

Explore with Pulumi AI

databricks logo
Databricks v1.36.0 published on Friday, Apr 19, 2024 by Pulumi

    Note This resource can only be used with a workspace-level provider!

    To work with external tables, Unity Catalog introduces two new objects to access and work with external cloud storage:

    • databricks.StorageCredential represents authentication methods to access cloud storage (e.g. an IAM role for Amazon S3 or a service principal for Azure Storage). Storage credentials are access-controlled to determine which users can use the credential.
    • databricks.ExternalLocation is an object that combines a cloud storage path with a Storage Credential that can be used to access the location.

    Example Usage

    For AWS

    import * as pulumi from "@pulumi/pulumi";
    import * as databricks from "@pulumi/databricks";
    
    const external = new databricks.StorageCredential("external", {
        name: externalDataAccess.name,
        awsIamRole: {
            roleArn: externalDataAccess.arn,
        },
        comment: "Managed by TF",
    });
    const some = new databricks.ExternalLocation("some", {
        name: "external",
        url: `s3://${externalAwsS3Bucket.id}/some`,
        credentialName: external.id,
        comment: "Managed by TF",
    });
    const someGrants = new databricks.Grants("some", {
        externalLocation: some.id,
        grants: [{
            principal: "Data Engineers",
            privileges: [
                "CREATE_EXTERNAL_TABLE",
                "READ_FILES",
            ],
        }],
    });
    
    import pulumi
    import pulumi_databricks as databricks
    
    external = databricks.StorageCredential("external",
        name=external_data_access["name"],
        aws_iam_role=databricks.StorageCredentialAwsIamRoleArgs(
            role_arn=external_data_access["arn"],
        ),
        comment="Managed by TF")
    some = databricks.ExternalLocation("some",
        name="external",
        url=f"s3://{external_aws_s3_bucket['id']}/some",
        credential_name=external.id,
        comment="Managed by TF")
    some_grants = databricks.Grants("some",
        external_location=some.id,
        grants=[databricks.GrantsGrantArgs(
            principal="Data Engineers",
            privileges=[
                "CREATE_EXTERNAL_TABLE",
                "READ_FILES",
            ],
        )])
    
    package main
    
    import (
    	"fmt"
    
    	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		external, err := databricks.NewStorageCredential(ctx, "external", &databricks.StorageCredentialArgs{
    			Name: pulumi.Any(externalDataAccess.Name),
    			AwsIamRole: &databricks.StorageCredentialAwsIamRoleArgs{
    				RoleArn: pulumi.Any(externalDataAccess.Arn),
    			},
    			Comment: pulumi.String("Managed by TF"),
    		})
    		if err != nil {
    			return err
    		}
    		some, err := databricks.NewExternalLocation(ctx, "some", &databricks.ExternalLocationArgs{
    			Name:           pulumi.String("external"),
    			Url:            pulumi.String(fmt.Sprintf("s3://%v/some", externalAwsS3Bucket.Id)),
    			CredentialName: external.ID(),
    			Comment:        pulumi.String("Managed by TF"),
    		})
    		if err != nil {
    			return err
    		}
    		_, err = databricks.NewGrants(ctx, "some", &databricks.GrantsArgs{
    			ExternalLocation: some.ID(),
    			Grants: databricks.GrantsGrantArray{
    				&databricks.GrantsGrantArgs{
    					Principal: pulumi.String("Data Engineers"),
    					Privileges: pulumi.StringArray{
    						pulumi.String("CREATE_EXTERNAL_TABLE"),
    						pulumi.String("READ_FILES"),
    					},
    				},
    			},
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Databricks = Pulumi.Databricks;
    
    return await Deployment.RunAsync(() => 
    {
        var external = new Databricks.StorageCredential("external", new()
        {
            Name = externalDataAccess.Name,
            AwsIamRole = new Databricks.Inputs.StorageCredentialAwsIamRoleArgs
            {
                RoleArn = externalDataAccess.Arn,
            },
            Comment = "Managed by TF",
        });
    
        var some = new Databricks.ExternalLocation("some", new()
        {
            Name = "external",
            Url = $"s3://{externalAwsS3Bucket.Id}/some",
            CredentialName = external.Id,
            Comment = "Managed by TF",
        });
    
        var someGrants = new Databricks.Grants("some", new()
        {
            ExternalLocation = some.Id,
            GrantDetails = new[]
            {
                new Databricks.Inputs.GrantsGrantArgs
                {
                    Principal = "Data Engineers",
                    Privileges = new[]
                    {
                        "CREATE_EXTERNAL_TABLE",
                        "READ_FILES",
                    },
                },
            },
        });
    
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.databricks.StorageCredential;
    import com.pulumi.databricks.StorageCredentialArgs;
    import com.pulumi.databricks.inputs.StorageCredentialAwsIamRoleArgs;
    import com.pulumi.databricks.ExternalLocation;
    import com.pulumi.databricks.ExternalLocationArgs;
    import com.pulumi.databricks.Grants;
    import com.pulumi.databricks.GrantsArgs;
    import com.pulumi.databricks.inputs.GrantsGrantArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var external = new StorageCredential("external", StorageCredentialArgs.builder()        
                .name(externalDataAccess.name())
                .awsIamRole(StorageCredentialAwsIamRoleArgs.builder()
                    .roleArn(externalDataAccess.arn())
                    .build())
                .comment("Managed by TF")
                .build());
    
            var some = new ExternalLocation("some", ExternalLocationArgs.builder()        
                .name("external")
                .url(String.format("s3://%s/some", externalAwsS3Bucket.id()))
                .credentialName(external.id())
                .comment("Managed by TF")
                .build());
    
            var someGrants = new Grants("someGrants", GrantsArgs.builder()        
                .externalLocation(some.id())
                .grants(GrantsGrantArgs.builder()
                    .principal("Data Engineers")
                    .privileges(                
                        "CREATE_EXTERNAL_TABLE",
                        "READ_FILES")
                    .build())
                .build());
    
        }
    }
    
    resources:
      external:
        type: databricks:StorageCredential
        properties:
          name: ${externalDataAccess.name}
          awsIamRole:
            roleArn: ${externalDataAccess.arn}
          comment: Managed by TF
      some:
        type: databricks:ExternalLocation
        properties:
          name: external
          url: s3://${externalAwsS3Bucket.id}/some
          credentialName: ${external.id}
          comment: Managed by TF
      someGrants:
        type: databricks:Grants
        name: some
        properties:
          externalLocation: ${some.id}
          grants:
            - principal: Data Engineers
              privileges:
                - CREATE_EXTERNAL_TABLE
                - READ_FILES
    

    For Azure

    Create ExternalLocation Resource

    Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.

    Constructor syntax

    new ExternalLocation(name: string, args: ExternalLocationArgs, opts?: CustomResourceOptions);
    @overload
    def ExternalLocation(resource_name: str,
                         args: ExternalLocationArgs,
                         opts: Optional[ResourceOptions] = None)
    
    @overload
    def ExternalLocation(resource_name: str,
                         opts: Optional[ResourceOptions] = None,
                         credential_name: Optional[str] = None,
                         url: Optional[str] = None,
                         access_point: Optional[str] = None,
                         comment: Optional[str] = None,
                         encryption_details: Optional[ExternalLocationEncryptionDetailsArgs] = None,
                         force_destroy: Optional[bool] = None,
                         force_update: Optional[bool] = None,
                         metastore_id: Optional[str] = None,
                         name: Optional[str] = None,
                         owner: Optional[str] = None,
                         read_only: Optional[bool] = None,
                         skip_validation: Optional[bool] = None)
    func NewExternalLocation(ctx *Context, name string, args ExternalLocationArgs, opts ...ResourceOption) (*ExternalLocation, error)
    public ExternalLocation(string name, ExternalLocationArgs args, CustomResourceOptions? opts = null)
    public ExternalLocation(String name, ExternalLocationArgs args)
    public ExternalLocation(String name, ExternalLocationArgs args, CustomResourceOptions options)
    
    type: databricks:ExternalLocation
    properties: # The arguments to resource properties.
    options: # Bag of options to control resource's behavior.
    
    

    Parameters

    name string
    The unique name of the resource.
    args ExternalLocationArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    resource_name str
    The unique name of the resource.
    args ExternalLocationArgs
    The arguments to resource properties.
    opts ResourceOptions
    Bag of options to control resource's behavior.
    ctx Context
    Context object for the current deployment.
    name string
    The unique name of the resource.
    args ExternalLocationArgs
    The arguments to resource properties.
    opts ResourceOption
    Bag of options to control resource's behavior.
    name string
    The unique name of the resource.
    args ExternalLocationArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    name String
    The unique name of the resource.
    args ExternalLocationArgs
    The arguments to resource properties.
    options CustomResourceOptions
    Bag of options to control resource's behavior.

    Example

    The following reference example uses placeholder values for all input properties.

    var externalLocationResource = new Databricks.ExternalLocation("externalLocationResource", new()
    {
        CredentialName = "string",
        Url = "string",
        AccessPoint = "string",
        Comment = "string",
        EncryptionDetails = new Databricks.Inputs.ExternalLocationEncryptionDetailsArgs
        {
            SseEncryptionDetails = new Databricks.Inputs.ExternalLocationEncryptionDetailsSseEncryptionDetailsArgs
            {
                Algorithm = "string",
                AwsKmsKeyArn = "string",
            },
        },
        ForceDestroy = false,
        ForceUpdate = false,
        MetastoreId = "string",
        Name = "string",
        Owner = "string",
        ReadOnly = false,
        SkipValidation = false,
    });
    
    example, err := databricks.NewExternalLocation(ctx, "externalLocationResource", &databricks.ExternalLocationArgs{
    	CredentialName: pulumi.String("string"),
    	Url:            pulumi.String("string"),
    	AccessPoint:    pulumi.String("string"),
    	Comment:        pulumi.String("string"),
    	EncryptionDetails: &databricks.ExternalLocationEncryptionDetailsArgs{
    		SseEncryptionDetails: &databricks.ExternalLocationEncryptionDetailsSseEncryptionDetailsArgs{
    			Algorithm:    pulumi.String("string"),
    			AwsKmsKeyArn: pulumi.String("string"),
    		},
    	},
    	ForceDestroy:   pulumi.Bool(false),
    	ForceUpdate:    pulumi.Bool(false),
    	MetastoreId:    pulumi.String("string"),
    	Name:           pulumi.String("string"),
    	Owner:          pulumi.String("string"),
    	ReadOnly:       pulumi.Bool(false),
    	SkipValidation: pulumi.Bool(false),
    })
    
    var externalLocationResource = new ExternalLocation("externalLocationResource", ExternalLocationArgs.builder()        
        .credentialName("string")
        .url("string")
        .accessPoint("string")
        .comment("string")
        .encryptionDetails(ExternalLocationEncryptionDetailsArgs.builder()
            .sseEncryptionDetails(ExternalLocationEncryptionDetailsSseEncryptionDetailsArgs.builder()
                .algorithm("string")
                .awsKmsKeyArn("string")
                .build())
            .build())
        .forceDestroy(false)
        .forceUpdate(false)
        .metastoreId("string")
        .name("string")
        .owner("string")
        .readOnly(false)
        .skipValidation(false)
        .build());
    
    external_location_resource = databricks.ExternalLocation("externalLocationResource",
        credential_name="string",
        url="string",
        access_point="string",
        comment="string",
        encryption_details=databricks.ExternalLocationEncryptionDetailsArgs(
            sse_encryption_details=databricks.ExternalLocationEncryptionDetailsSseEncryptionDetailsArgs(
                algorithm="string",
                aws_kms_key_arn="string",
            ),
        ),
        force_destroy=False,
        force_update=False,
        metastore_id="string",
        name="string",
        owner="string",
        read_only=False,
        skip_validation=False)
    
    const externalLocationResource = new databricks.ExternalLocation("externalLocationResource", {
        credentialName: "string",
        url: "string",
        accessPoint: "string",
        comment: "string",
        encryptionDetails: {
            sseEncryptionDetails: {
                algorithm: "string",
                awsKmsKeyArn: "string",
            },
        },
        forceDestroy: false,
        forceUpdate: false,
        metastoreId: "string",
        name: "string",
        owner: "string",
        readOnly: false,
        skipValidation: false,
    });
    
    type: databricks:ExternalLocation
    properties:
        accessPoint: string
        comment: string
        credentialName: string
        encryptionDetails:
            sseEncryptionDetails:
                algorithm: string
                awsKmsKeyArn: string
        forceDestroy: false
        forceUpdate: false
        metastoreId: string
        name: string
        owner: string
        readOnly: false
        skipValidation: false
        url: string
    

    ExternalLocation Resource Properties

    To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

    Inputs

    The ExternalLocation resource accepts the following input properties:

    CredentialName string
    Name of the databricks.StorageCredential to use with this external location.
    Url string
    Path URL in cloud storage, of the form: s3://[bucket-host]/[bucket-dir] (AWS), abfss://[user]@[host]/[path] (Azure), gs://[bucket-host]/[bucket-dir] (GCP).
    AccessPoint string
    The ARN of the s3 access point to use with the external location (AWS).
    Comment string
    User-supplied free-form text.
    EncryptionDetails ExternalLocationEncryptionDetails
    The options for Server-Side Encryption to be used by each Databricks s3 client when connecting to S3 cloud storage (AWS).
    ForceDestroy bool
    Destroy external location regardless of its dependents.
    ForceUpdate bool
    Update external location regardless of its dependents.
    MetastoreId string
    Name string
    Name of External Location, which must be unique within the databricks_metastore. Change forces creation of a new resource.
    Owner string
    Username/groupname/sp application_id of the external location owner.
    ReadOnly bool
    Indicates whether the external location is read-only.
    SkipValidation bool
    Suppress validation errors, if any, and force-save the external location.
    CredentialName string
    Name of the databricks.StorageCredential to use with this external location.
    Url string
    Path URL in cloud storage, of the form: s3://[bucket-host]/[bucket-dir] (AWS), abfss://[user]@[host]/[path] (Azure), gs://[bucket-host]/[bucket-dir] (GCP).
    AccessPoint string
    The ARN of the s3 access point to use with the external location (AWS).
    Comment string
    User-supplied free-form text.
    EncryptionDetails ExternalLocationEncryptionDetailsArgs
    The options for Server-Side Encryption to be used by each Databricks s3 client when connecting to S3 cloud storage (AWS).
    ForceDestroy bool
    Destroy external location regardless of its dependents.
    ForceUpdate bool
    Update external location regardless of its dependents.
    MetastoreId string
    Name string
    Name of External Location, which must be unique within the databricks_metastore. Change forces creation of a new resource.
    Owner string
    Username/groupname/sp application_id of the external location owner.
    ReadOnly bool
    Indicates whether the external location is read-only.
    SkipValidation bool
    Suppress validation errors, if any, and force-save the external location.
    credentialName String
    Name of the databricks.StorageCredential to use with this external location.
    url String
    Path URL in cloud storage, of the form: s3://[bucket-host]/[bucket-dir] (AWS), abfss://[user]@[host]/[path] (Azure), gs://[bucket-host]/[bucket-dir] (GCP).
    accessPoint String
    The ARN of the s3 access point to use with the external location (AWS).
    comment String
    User-supplied free-form text.
    encryptionDetails ExternalLocationEncryptionDetails
    The options for Server-Side Encryption to be used by each Databricks s3 client when connecting to S3 cloud storage (AWS).
    forceDestroy Boolean
    Destroy external location regardless of its dependents.
    forceUpdate Boolean
    Update external location regardless of its dependents.
    metastoreId String
    name String
    Name of External Location, which must be unique within the databricks_metastore. Change forces creation of a new resource.
    owner String
    Username/groupname/sp application_id of the external location owner.
    readOnly Boolean
    Indicates whether the external location is read-only.
    skipValidation Boolean
    Suppress validation errors, if any, and force-save the external location.
    credentialName string
    Name of the databricks.StorageCredential to use with this external location.
    url string
    Path URL in cloud storage, of the form: s3://[bucket-host]/[bucket-dir] (AWS), abfss://[user]@[host]/[path] (Azure), gs://[bucket-host]/[bucket-dir] (GCP).
    accessPoint string
    The ARN of the s3 access point to use with the external location (AWS).
    comment string
    User-supplied free-form text.
    encryptionDetails ExternalLocationEncryptionDetails
    The options for Server-Side Encryption to be used by each Databricks s3 client when connecting to S3 cloud storage (AWS).
    forceDestroy boolean
    Destroy external location regardless of its dependents.
    forceUpdate boolean
    Update external location regardless of its dependents.
    metastoreId string
    name string
    Name of External Location, which must be unique within the databricks_metastore. Change forces creation of a new resource.
    owner string
    Username/groupname/sp application_id of the external location owner.
    readOnly boolean
    Indicates whether the external location is read-only.
    skipValidation boolean
    Suppress validation errors, if any, and force-save the external location.
    credential_name str
    Name of the databricks.StorageCredential to use with this external location.
    url str
    Path URL in cloud storage, of the form: s3://[bucket-host]/[bucket-dir] (AWS), abfss://[user]@[host]/[path] (Azure), gs://[bucket-host]/[bucket-dir] (GCP).
    access_point str
    The ARN of the s3 access point to use with the external location (AWS).
    comment str
    User-supplied free-form text.
    encryption_details ExternalLocationEncryptionDetailsArgs
    The options for Server-Side Encryption to be used by each Databricks s3 client when connecting to S3 cloud storage (AWS).
    force_destroy bool
    Destroy external location regardless of its dependents.
    force_update bool
    Update external location regardless of its dependents.
    metastore_id str
    name str
    Name of External Location, which must be unique within the databricks_metastore. Change forces creation of a new resource.
    owner str
    Username/groupname/sp application_id of the external location owner.
    read_only bool
    Indicates whether the external location is read-only.
    skip_validation bool
    Suppress validation errors, if any, and force-save the external location.
    credentialName String
    Name of the databricks.StorageCredential to use with this external location.
    url String
    Path URL in cloud storage, of the form: s3://[bucket-host]/[bucket-dir] (AWS), abfss://[user]@[host]/[path] (Azure), gs://[bucket-host]/[bucket-dir] (GCP).
    accessPoint String
    The ARN of the s3 access point to use with the external location (AWS).
    comment String
    User-supplied free-form text.
    encryptionDetails Property Map
    The options for Server-Side Encryption to be used by each Databricks s3 client when connecting to S3 cloud storage (AWS).
    forceDestroy Boolean
    Destroy external location regardless of its dependents.
    forceUpdate Boolean
    Update external location regardless of its dependents.
    metastoreId String
    name String
    Name of External Location, which must be unique within the databricks_metastore. Change forces creation of a new resource.
    owner String
    Username/groupname/sp application_id of the external location owner.
    readOnly Boolean
    Indicates whether the external location is read-only.
    skipValidation Boolean
    Suppress validation errors, if any, and force-save the external location.

    Outputs

    All input properties are implicitly available as output properties. Additionally, the ExternalLocation resource produces the following output properties:

    Id string
    The provider-assigned unique ID for this managed resource.
    Id string
    The provider-assigned unique ID for this managed resource.
    id String
    The provider-assigned unique ID for this managed resource.
    id string
    The provider-assigned unique ID for this managed resource.
    id str
    The provider-assigned unique ID for this managed resource.
    id String
    The provider-assigned unique ID for this managed resource.

    Look up Existing ExternalLocation Resource

    Get an existing ExternalLocation resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

    public static get(name: string, id: Input<ID>, state?: ExternalLocationState, opts?: CustomResourceOptions): ExternalLocation
    @staticmethod
    def get(resource_name: str,
            id: str,
            opts: Optional[ResourceOptions] = None,
            access_point: Optional[str] = None,
            comment: Optional[str] = None,
            credential_name: Optional[str] = None,
            encryption_details: Optional[ExternalLocationEncryptionDetailsArgs] = None,
            force_destroy: Optional[bool] = None,
            force_update: Optional[bool] = None,
            metastore_id: Optional[str] = None,
            name: Optional[str] = None,
            owner: Optional[str] = None,
            read_only: Optional[bool] = None,
            skip_validation: Optional[bool] = None,
            url: Optional[str] = None) -> ExternalLocation
    func GetExternalLocation(ctx *Context, name string, id IDInput, state *ExternalLocationState, opts ...ResourceOption) (*ExternalLocation, error)
    public static ExternalLocation Get(string name, Input<string> id, ExternalLocationState? state, CustomResourceOptions? opts = null)
    public static ExternalLocation get(String name, Output<String> id, ExternalLocationState state, CustomResourceOptions options)
    Resource lookup is not supported in YAML
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    resource_name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    The following state arguments are supported:
    AccessPoint string
    The ARN of the s3 access point to use with the external location (AWS).
    Comment string
    User-supplied free-form text.
    CredentialName string
    Name of the databricks.StorageCredential to use with this external location.
    EncryptionDetails ExternalLocationEncryptionDetails
    The options for Server-Side Encryption to be used by each Databricks s3 client when connecting to S3 cloud storage (AWS).
    ForceDestroy bool
    Destroy external location regardless of its dependents.
    ForceUpdate bool
    Update external location regardless of its dependents.
    MetastoreId string
    Name string
    Name of External Location, which must be unique within the databricks_metastore. Change forces creation of a new resource.
    Owner string
    Username/groupname/sp application_id of the external location owner.
    ReadOnly bool
    Indicates whether the external location is read-only.
    SkipValidation bool
    Suppress validation errors, if any, and force-save the external location.
    Url string
    Path URL in cloud storage, of the form: s3://[bucket-host]/[bucket-dir] (AWS), abfss://[user]@[host]/[path] (Azure), gs://[bucket-host]/[bucket-dir] (GCP).
    AccessPoint string
    The ARN of the s3 access point to use with the external location (AWS).
    Comment string
    User-supplied free-form text.
    CredentialName string
    Name of the databricks.StorageCredential to use with this external location.
    EncryptionDetails ExternalLocationEncryptionDetailsArgs
    The options for Server-Side Encryption to be used by each Databricks s3 client when connecting to S3 cloud storage (AWS).
    ForceDestroy bool
    Destroy external location regardless of its dependents.
    ForceUpdate bool
    Update external location regardless of its dependents.
    MetastoreId string
    Name string
    Name of External Location, which must be unique within the databricks_metastore. Change forces creation of a new resource.
    Owner string
    Username/groupname/sp application_id of the external location owner.
    ReadOnly bool
    Indicates whether the external location is read-only.
    SkipValidation bool
    Suppress validation errors, if any, and force-save the external location.
    Url string
    Path URL in cloud storage, of the form: s3://[bucket-host]/[bucket-dir] (AWS), abfss://[user]@[host]/[path] (Azure), gs://[bucket-host]/[bucket-dir] (GCP).
    accessPoint String
    The ARN of the s3 access point to use with the external location (AWS).
    comment String
    User-supplied free-form text.
    credentialName String
    Name of the databricks.StorageCredential to use with this external location.
    encryptionDetails ExternalLocationEncryptionDetails
    The options for Server-Side Encryption to be used by each Databricks s3 client when connecting to S3 cloud storage (AWS).
    forceDestroy Boolean
    Destroy external location regardless of its dependents.
    forceUpdate Boolean
    Update external location regardless of its dependents.
    metastoreId String
    name String
    Name of External Location, which must be unique within the databricks_metastore. Change forces creation of a new resource.
    owner String
    Username/groupname/sp application_id of the external location owner.
    readOnly Boolean
    Indicates whether the external location is read-only.
    skipValidation Boolean
    Suppress validation errors, if any, and force-save the external location.
    url String
    Path URL in cloud storage, of the form: s3://[bucket-host]/[bucket-dir] (AWS), abfss://[user]@[host]/[path] (Azure), gs://[bucket-host]/[bucket-dir] (GCP).
    accessPoint string
    The ARN of the s3 access point to use with the external location (AWS).
    comment string
    User-supplied free-form text.
    credentialName string
    Name of the databricks.StorageCredential to use with this external location.
    encryptionDetails ExternalLocationEncryptionDetails
    The options for Server-Side Encryption to be used by each Databricks s3 client when connecting to S3 cloud storage (AWS).
    forceDestroy boolean
    Destroy external location regardless of its dependents.
    forceUpdate boolean
    Update external location regardless of its dependents.
    metastoreId string
    name string
    Name of External Location, which must be unique within the databricks_metastore. Change forces creation of a new resource.
    owner string
    Username/groupname/sp application_id of the external location owner.
    readOnly boolean
    Indicates whether the external location is read-only.
    skipValidation boolean
    Suppress validation errors, if any, and force-save the external location.
    url string
    Path URL in cloud storage, of the form: s3://[bucket-host]/[bucket-dir] (AWS), abfss://[user]@[host]/[path] (Azure), gs://[bucket-host]/[bucket-dir] (GCP).
    access_point str
    The ARN of the s3 access point to use with the external location (AWS).
    comment str
    User-supplied free-form text.
    credential_name str
    Name of the databricks.StorageCredential to use with this external location.
    encryption_details ExternalLocationEncryptionDetailsArgs
    The options for Server-Side Encryption to be used by each Databricks s3 client when connecting to S3 cloud storage (AWS).
    force_destroy bool
    Destroy external location regardless of its dependents.
    force_update bool
    Update external location regardless of its dependents.
    metastore_id str
    name str
    Name of External Location, which must be unique within the databricks_metastore. Change forces creation of a new resource.
    owner str
    Username/groupname/sp application_id of the external location owner.
    read_only bool
    Indicates whether the external location is read-only.
    skip_validation bool
    Suppress validation errors, if any, and force-save the external location.
    url str
    Path URL in cloud storage, of the form: s3://[bucket-host]/[bucket-dir] (AWS), abfss://[user]@[host]/[path] (Azure), gs://[bucket-host]/[bucket-dir] (GCP).
    accessPoint String
    The ARN of the s3 access point to use with the external location (AWS).
    comment String
    User-supplied free-form text.
    credentialName String
    Name of the databricks.StorageCredential to use with this external location.
    encryptionDetails Property Map
    The options for Server-Side Encryption to be used by each Databricks s3 client when connecting to S3 cloud storage (AWS).
    forceDestroy Boolean
    Destroy external location regardless of its dependents.
    forceUpdate Boolean
    Update external location regardless of its dependents.
    metastoreId String
    name String
    Name of External Location, which must be unique within the databricks_metastore. Change forces creation of a new resource.
    owner String
    Username/groupname/sp application_id of the external location owner.
    readOnly Boolean
    Indicates whether the external location is read-only.
    skipValidation Boolean
    Suppress validation errors, if any, and force-save the external location.
    url String
    Path URL in cloud storage, of the form: s3://[bucket-host]/[bucket-dir] (AWS), abfss://[user]@[host]/[path] (Azure), gs://[bucket-host]/[bucket-dir] (GCP).

    Supporting Types

    ExternalLocationEncryptionDetails, ExternalLocationEncryptionDetailsArgs

    ExternalLocationEncryptionDetailsSseEncryptionDetails, ExternalLocationEncryptionDetailsSseEncryptionDetailsArgs

    Algorithm string
    AwsKmsKeyArn string
    Algorithm string
    AwsKmsKeyArn string
    algorithm String
    awsKmsKeyArn String
    algorithm string
    awsKmsKeyArn string
    algorithm String
    awsKmsKeyArn String

    Import

    This resource can be imported by name:

    bash

    $ pulumi import databricks:index/externalLocation:ExternalLocation this <name>
    

    To learn more about importing existing cloud resources, see Importing resources.

    Package Details

    Repository
    databricks pulumi/pulumi-databricks
    License
    Apache-2.0
    Notes
    This Pulumi package is based on the databricks Terraform Provider.
    databricks logo
    Databricks v1.36.0 published on Friday, Apr 19, 2024 by Pulumi