1. Packages
  2. Databricks
  3. API Docs
  4. MetastoreAssignment
Databricks v1.27.0 published on Tuesday, Dec 5, 2023 by Pulumi

databricks.MetastoreAssignment

Explore with Pulumi AI

databricks logo
Databricks v1.27.0 published on Tuesday, Dec 5, 2023 by Pulumi

    A single databricks.Metastore can be shared across Databricks workspaces, and each linked workspace has a consistent view of the data and a single set of access policies. You can only create a single metastore for each region in which your organization operates.

    Example Usage

    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Databricks = Pulumi.Databricks;
    
    return await Deployment.RunAsync(() => 
    {
        var thisMetastore = new Databricks.Metastore("thisMetastore", new()
        {
            StorageRoot = $"s3://{aws_s3_bucket.Metastore.Id}/metastore",
            Owner = "uc admins",
            Region = "us-east-1",
            ForceDestroy = true,
        });
    
        var thisMetastoreAssignment = new Databricks.MetastoreAssignment("thisMetastoreAssignment", new()
        {
            MetastoreId = thisMetastore.Id,
            WorkspaceId = local.Workspace_id,
        });
    
    });
    
    package main
    
    import (
    	"fmt"
    
    	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		thisMetastore, err := databricks.NewMetastore(ctx, "thisMetastore", &databricks.MetastoreArgs{
    			StorageRoot:  pulumi.String(fmt.Sprintf("s3://%v/metastore", aws_s3_bucket.Metastore.Id)),
    			Owner:        pulumi.String("uc admins"),
    			Region:       pulumi.String("us-east-1"),
    			ForceDestroy: pulumi.Bool(true),
    		})
    		if err != nil {
    			return err
    		}
    		_, err = databricks.NewMetastoreAssignment(ctx, "thisMetastoreAssignment", &databricks.MetastoreAssignmentArgs{
    			MetastoreId: thisMetastore.ID(),
    			WorkspaceId: pulumi.Any(local.Workspace_id),
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.databricks.Metastore;
    import com.pulumi.databricks.MetastoreArgs;
    import com.pulumi.databricks.MetastoreAssignment;
    import com.pulumi.databricks.MetastoreAssignmentArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var thisMetastore = new Metastore("thisMetastore", MetastoreArgs.builder()        
                .storageRoot(String.format("s3://%s/metastore", aws_s3_bucket.metastore().id()))
                .owner("uc admins")
                .region("us-east-1")
                .forceDestroy(true)
                .build());
    
            var thisMetastoreAssignment = new MetastoreAssignment("thisMetastoreAssignment", MetastoreAssignmentArgs.builder()        
                .metastoreId(thisMetastore.id())
                .workspaceId(local.workspace_id())
                .build());
    
        }
    }
    
    import pulumi
    import pulumi_databricks as databricks
    
    this_metastore = databricks.Metastore("thisMetastore",
        storage_root=f"s3://{aws_s3_bucket['metastore']['id']}/metastore",
        owner="uc admins",
        region="us-east-1",
        force_destroy=True)
    this_metastore_assignment = databricks.MetastoreAssignment("thisMetastoreAssignment",
        metastore_id=this_metastore.id,
        workspace_id=local["workspace_id"])
    
    import * as pulumi from "@pulumi/pulumi";
    import * as databricks from "@pulumi/databricks";
    
    const thisMetastore = new databricks.Metastore("thisMetastore", {
        storageRoot: `s3://${aws_s3_bucket.metastore.id}/metastore`,
        owner: "uc admins",
        region: "us-east-1",
        forceDestroy: true,
    });
    const thisMetastoreAssignment = new databricks.MetastoreAssignment("thisMetastoreAssignment", {
        metastoreId: thisMetastore.id,
        workspaceId: local.workspace_id,
    });
    
    resources:
      thisMetastore:
        type: databricks:Metastore
        properties:
          storageRoot: s3://${aws_s3_bucket.metastore.id}/metastore
          owner: uc admins
          region: us-east-1
          forceDestroy: true
      thisMetastoreAssignment:
        type: databricks:MetastoreAssignment
        properties:
          metastoreId: ${thisMetastore.id}
          workspaceId: ${local.workspace_id}
    

    Create MetastoreAssignment Resource

    new MetastoreAssignment(name: string, args: MetastoreAssignmentArgs, opts?: CustomResourceOptions);
    @overload
    def MetastoreAssignment(resource_name: str,
                            opts: Optional[ResourceOptions] = None,
                            default_catalog_name: Optional[str] = None,
                            metastore_id: Optional[str] = None,
                            workspace_id: Optional[int] = None)
    @overload
    def MetastoreAssignment(resource_name: str,
                            args: MetastoreAssignmentArgs,
                            opts: Optional[ResourceOptions] = None)
    func NewMetastoreAssignment(ctx *Context, name string, args MetastoreAssignmentArgs, opts ...ResourceOption) (*MetastoreAssignment, error)
    public MetastoreAssignment(string name, MetastoreAssignmentArgs args, CustomResourceOptions? opts = null)
    public MetastoreAssignment(String name, MetastoreAssignmentArgs args)
    public MetastoreAssignment(String name, MetastoreAssignmentArgs args, CustomResourceOptions options)
    
    type: databricks:MetastoreAssignment
    properties: # The arguments to resource properties.
    options: # Bag of options to control resource's behavior.
    
    
    name string
    The unique name of the resource.
    args MetastoreAssignmentArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    resource_name str
    The unique name of the resource.
    args MetastoreAssignmentArgs
    The arguments to resource properties.
    opts ResourceOptions
    Bag of options to control resource's behavior.
    ctx Context
    Context object for the current deployment.
    name string
    The unique name of the resource.
    args MetastoreAssignmentArgs
    The arguments to resource properties.
    opts ResourceOption
    Bag of options to control resource's behavior.
    name string
    The unique name of the resource.
    args MetastoreAssignmentArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    name String
    The unique name of the resource.
    args MetastoreAssignmentArgs
    The arguments to resource properties.
    options CustomResourceOptions
    Bag of options to control resource's behavior.

    MetastoreAssignment Resource Properties

    To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

    Inputs

    The MetastoreAssignment resource accepts the following input properties:

    MetastoreId string

    Unique identifier of the parent Metastore

    WorkspaceId int

    ID of the workspace for the assignment

    DefaultCatalogName string

    Default catalog used for this assignment; defaults to hive_metastore

    MetastoreId string

    Unique identifier of the parent Metastore

    WorkspaceId int

    ID of the workspace for the assignment

    DefaultCatalogName string

    Default catalog used for this assignment; defaults to hive_metastore

    metastoreId String

    Unique identifier of the parent Metastore

    workspaceId Integer

    ID of the workspace for the assignment

    defaultCatalogName String

    Default catalog used for this assignment; defaults to hive_metastore

    metastoreId string

    Unique identifier of the parent Metastore

    workspaceId number

    ID of the workspace for the assignment

    defaultCatalogName string

    Default catalog used for this assignment; defaults to hive_metastore

    metastore_id str

    Unique identifier of the parent Metastore

    workspace_id int

    ID of the workspace for the assignment

    default_catalog_name str

    Default catalog used for this assignment; defaults to hive_metastore

    metastoreId String

    Unique identifier of the parent Metastore

    workspaceId Number

    ID of the workspace for the assignment

    defaultCatalogName String

    Default catalog used for this assignment; defaults to hive_metastore

    Outputs

    All input properties are implicitly available as output properties. Additionally, the MetastoreAssignment resource produces the following output properties:

    Id string

    The provider-assigned unique ID for this managed resource.

    Id string

    The provider-assigned unique ID for this managed resource.

    id String

    The provider-assigned unique ID for this managed resource.

    id string

    The provider-assigned unique ID for this managed resource.

    id str

    The provider-assigned unique ID for this managed resource.

    id String

    The provider-assigned unique ID for this managed resource.

    Look up Existing MetastoreAssignment Resource

    Get an existing MetastoreAssignment resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

    public static get(name: string, id: Input<ID>, state?: MetastoreAssignmentState, opts?: CustomResourceOptions): MetastoreAssignment
    @staticmethod
    def get(resource_name: str,
            id: str,
            opts: Optional[ResourceOptions] = None,
            default_catalog_name: Optional[str] = None,
            metastore_id: Optional[str] = None,
            workspace_id: Optional[int] = None) -> MetastoreAssignment
    func GetMetastoreAssignment(ctx *Context, name string, id IDInput, state *MetastoreAssignmentState, opts ...ResourceOption) (*MetastoreAssignment, error)
    public static MetastoreAssignment Get(string name, Input<string> id, MetastoreAssignmentState? state, CustomResourceOptions? opts = null)
    public static MetastoreAssignment get(String name, Output<String> id, MetastoreAssignmentState state, CustomResourceOptions options)
    Resource lookup is not supported in YAML
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    resource_name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    The following state arguments are supported:
    DefaultCatalogName string

    Default catalog used for this assignment; defaults to hive_metastore

    MetastoreId string

    Unique identifier of the parent Metastore

    WorkspaceId int

    ID of the workspace for the assignment

    DefaultCatalogName string

    Default catalog used for this assignment; defaults to hive_metastore

    MetastoreId string

    Unique identifier of the parent Metastore

    WorkspaceId int

    ID of the workspace for the assignment

    defaultCatalogName String

    Default catalog used for this assignment; defaults to hive_metastore

    metastoreId String

    Unique identifier of the parent Metastore

    workspaceId Integer

    ID of the workspace for the assignment

    defaultCatalogName string

    Default catalog used for this assignment; defaults to hive_metastore

    metastoreId string

    Unique identifier of the parent Metastore

    workspaceId number

    ID of the workspace for the assignment

    default_catalog_name str

    Default catalog used for this assignment; defaults to hive_metastore

    metastore_id str

    Unique identifier of the parent Metastore

    workspace_id int

    ID of the workspace for the assignment

    defaultCatalogName String

    Default catalog used for this assignment; defaults to hive_metastore

    metastoreId String

    Unique identifier of the parent Metastore

    workspaceId Number

    ID of the workspace for the assignment

    Import

    This resource can be imported by a combination of workspace id and metastore id:

     $ pulumi import databricks:index/metastoreAssignment:MetastoreAssignment this '<workspace_id>|<metastore_id>'
    

    Package Details

    Repository
    databricks pulumi/pulumi-databricks
    License
    Apache-2.0
    Notes

    This Pulumi package is based on the databricks Terraform Provider.

    databricks logo
    Databricks v1.27.0 published on Tuesday, Dec 5, 2023 by Pulumi