1. Packages
  2. Packages
  3. Databricks Provider
  4. API Docs
  5. MetastoreAssignment
Viewing docs for Databricks v0.4.0 (Older version)
published on Monday, Mar 9, 2026 by Pulumi
databricks logo
Viewing docs for Databricks v0.4.0 (Older version)
published on Monday, Mar 9, 2026 by Pulumi

    Private Preview: This feature is in Private Preview. Contact your Databricks representative to request access.

    A single databricks.Metastore can be shared across Databricks workspaces, and each linked workspace has a consistent view of the data and a single set of access policies. It is only recommended to have multiple metastores when organizations wish to have hard isolation boundaries between data (note that data cannot be easily joined/queried across metastores).

    Example Usage

    using Pulumi;
    using Databricks = Pulumi.Databricks;
    
    // Example stack: creates a Unity Catalog metastore and assigns it to a workspace.
    class MyStack : Stack
    {
        public MyStack()
        {
            // Metastore rooted in an S3 bucket. NOTE(review): aws_s3_bucket is a
            // documentation placeholder carried over from the Terraform example —
            // substitute a real bucket reference in an actual program.
            var thisMetastore = new Databricks.Metastore("thisMetastore", new Databricks.MetastoreArgs
            {
                StorageRoot = $"s3://{aws_s3_bucket.Metastore.Id}/metastore",
                Owner = "uc admins",
                // Allow the metastore to be deleted even if it is not empty.
                ForceDestroy = true,
            });
            // Link the metastore to a workspace. NOTE(review): local.Workspace_id
            // is likewise a placeholder for a concrete workspace ID.
            var thisMetastoreAssignment = new Databricks.MetastoreAssignment("thisMetastoreAssignment", new Databricks.MetastoreAssignmentArgs
            {
                MetastoreId = thisMetastore.Id,
                WorkspaceId = local.Workspace_id,
            });
        }
    
    }
    
    
    package main
    
    import (
    	"fmt"
    
    	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		thisMetastore, err := databricks.NewMetastore(ctx, "thisMetastore", &databricks.MetastoreArgs{
    			StorageRoot:  pulumi.String(fmt.Sprintf("%v%v%v", "s3://", aws_s3_bucket.Metastore.Id, "/metastore")),
    			Owner:        pulumi.String("uc admins"),
    			ForceDestroy: pulumi.Bool(true),
    		})
    		if err != nil {
    			return err
    		}
    		_, err = databricks.NewMetastoreAssignment(ctx, "thisMetastoreAssignment", &databricks.MetastoreAssignmentArgs{
    			MetastoreId: thisMetastore.ID(),
    			WorkspaceId: pulumi.Any(local.Workspace_id),
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    

    Example coming soon!

    import * as pulumi from "@pulumi/pulumi";
    import * as databricks from "@pulumi/databricks";
    
    // Metastore rooted in an S3 bucket; forceDestroy lets the metastore be
    // deleted even when non-empty. NOTE(review): aws_s3_bucket is a doc
    // placeholder from the Terraform example — use a real bucket reference.
    const thisMetastore = new databricks.Metastore("thisMetastore", {
        storageRoot: "s3://" + aws_s3_bucket.metastore.id + "/metastore",
        forceDestroy: true,
        owner: "uc admins",
    });
    // Attach the metastore to a workspace. NOTE(review): local.workspace_id
    // is likewise a placeholder for a concrete workspace ID.
    const thisMetastoreAssignment = new databricks.MetastoreAssignment("thisMetastoreAssignment", {
        metastoreId: thisMetastore.id,
        workspaceId: local.workspace_id,
    });
    
    
    import pulumi
    import pulumi_databricks as databricks
    
    # Metastore rooted in an S3 bucket; force_destroy lets the metastore be
    # deleted even when non-empty. NOTE(review): aws_s3_bucket is a doc
    # placeholder from the Terraform example — use a real bucket reference.
    _storage_root = "s3://{}/metastore".format(aws_s3_bucket["metastore"]["id"])
    this_metastore = databricks.Metastore(
        "thisMetastore",
        storage_root=_storage_root,
        owner="uc admins",
        force_destroy=True,
    )
    # Attach the metastore to a workspace. NOTE(review): local["workspace_id"]
    # is likewise a placeholder for a concrete workspace ID.
    this_metastore_assignment = databricks.MetastoreAssignment(
        "thisMetastoreAssignment",
        metastore_id=this_metastore.id,
        workspace_id=local["workspace_id"],
    )
    

    Example coming soon!

    Create MetastoreAssignment Resource

    Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.

    Constructor syntax

    new MetastoreAssignment(name: string, args: MetastoreAssignmentArgs, opts?: CustomResourceOptions);
    @overload
    def MetastoreAssignment(resource_name: str,
                            args: MetastoreAssignmentArgs,
                            opts: Optional[ResourceOptions] = None)
    
    @overload
    def MetastoreAssignment(resource_name: str,
                            opts: Optional[ResourceOptions] = None,
                            metastore_id: Optional[str] = None,
                            workspace_id: Optional[int] = None,
                            default_catalog_name: Optional[str] = None)
    func NewMetastoreAssignment(ctx *Context, name string, args MetastoreAssignmentArgs, opts ...ResourceOption) (*MetastoreAssignment, error)
    public MetastoreAssignment(string name, MetastoreAssignmentArgs args, CustomResourceOptions? opts = null)
    public MetastoreAssignment(String name, MetastoreAssignmentArgs args)
    public MetastoreAssignment(String name, MetastoreAssignmentArgs args, CustomResourceOptions options)
    
    type: databricks:MetastoreAssignment
    properties: # The arguments to resource properties.
    options: # Bag of options to control resource's behavior.
    
    

    Parameters

    name string
    The unique name of the resource.
    args MetastoreAssignmentArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    resource_name str
    The unique name of the resource.
    args MetastoreAssignmentArgs
    The arguments to resource properties.
    opts ResourceOptions
    Bag of options to control resource's behavior.
    ctx Context
    Context object for the current deployment.
    name string
    The unique name of the resource.
    args MetastoreAssignmentArgs
    The arguments to resource properties.
    opts ResourceOption
    Bag of options to control resource's behavior.
    name string
    The unique name of the resource.
    args MetastoreAssignmentArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    name String
    The unique name of the resource.
    args MetastoreAssignmentArgs
    The arguments to resource properties.
    options CustomResourceOptions
    Bag of options to control resource's behavior.

    Constructor example

    The following reference example uses placeholder values for all input properties.

    var metastoreAssignmentResource = new Databricks.MetastoreAssignment("metastoreAssignmentResource", new()
    {
        MetastoreId = "string",
        WorkspaceId = 0,
        DefaultCatalogName = "string",
    });
    
    example, err := databricks.NewMetastoreAssignment(ctx, "metastoreAssignmentResource", &databricks.MetastoreAssignmentArgs{
    	MetastoreId:        pulumi.String("string"),
    	WorkspaceId:        pulumi.Int(0),
    	DefaultCatalogName: pulumi.String("string"),
    })
    
    var metastoreAssignmentResource = new MetastoreAssignment("metastoreAssignmentResource", MetastoreAssignmentArgs.builder()
        .metastoreId("string")
        .workspaceId(0)
        .defaultCatalogName("string")
        .build());
    
    metastore_assignment_resource = databricks.MetastoreAssignment("metastoreAssignmentResource",
        metastore_id="string",
        workspace_id=0,
        default_catalog_name="string")
    
    const metastoreAssignmentResource = new databricks.MetastoreAssignment("metastoreAssignmentResource", {
        metastoreId: "string",
        workspaceId: 0,
        defaultCatalogName: "string",
    });
    
    type: databricks:MetastoreAssignment
    properties:
        defaultCatalogName: string
        metastoreId: string
        workspaceId: 0
    

    MetastoreAssignment Resource Properties

    To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

    Inputs

    In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.

    The MetastoreAssignment resource accepts the following input properties:

    MetastoreId string
    Unique identifier of the parent Metastore
    WorkspaceId int
    id of the workspace for the assignment
    DefaultCatalogName string
    Default catalog used for this assignment; defaults to hive_metastore
    MetastoreId string
    Unique identifier of the parent Metastore
    WorkspaceId int
    id of the workspace for the assignment
    DefaultCatalogName string
    Default catalog used for this assignment; defaults to hive_metastore
    metastoreId String
    Unique identifier of the parent Metastore
    workspaceId Integer
    id of the workspace for the assignment
    defaultCatalogName String
    Default catalog used for this assignment; defaults to hive_metastore
    metastoreId string
    Unique identifier of the parent Metastore
    workspaceId number
    id of the workspace for the assignment
    defaultCatalogName string
    Default catalog used for this assignment; defaults to hive_metastore
    metastore_id str
    Unique identifier of the parent Metastore
    workspace_id int
    id of the workspace for the assignment
    default_catalog_name str
    Default catalog used for this assignment; defaults to hive_metastore
    metastoreId String
    Unique identifier of the parent Metastore
    workspaceId Number
    id of the workspace for the assignment
    defaultCatalogName String
    Default catalog used for this assignment; defaults to hive_metastore

    Outputs

    All input properties are implicitly available as output properties. Additionally, the MetastoreAssignment resource produces the following output properties:

    Id string
    The provider-assigned unique ID for this managed resource.
    Id string
    The provider-assigned unique ID for this managed resource.
    id String
    The provider-assigned unique ID for this managed resource.
    id string
    The provider-assigned unique ID for this managed resource.
    id str
    The provider-assigned unique ID for this managed resource.
    id String
    The provider-assigned unique ID for this managed resource.

    Look up Existing MetastoreAssignment Resource

    Get an existing MetastoreAssignment resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

    public static get(name: string, id: Input<ID>, state?: MetastoreAssignmentState, opts?: CustomResourceOptions): MetastoreAssignment
    @staticmethod
    def get(resource_name: str,
            id: str,
            opts: Optional[ResourceOptions] = None,
            default_catalog_name: Optional[str] = None,
            metastore_id: Optional[str] = None,
            workspace_id: Optional[int] = None) -> MetastoreAssignment
    func GetMetastoreAssignment(ctx *Context, name string, id IDInput, state *MetastoreAssignmentState, opts ...ResourceOption) (*MetastoreAssignment, error)
    public static MetastoreAssignment Get(string name, Input<string> id, MetastoreAssignmentState? state, CustomResourceOptions? opts = null)
    public static MetastoreAssignment get(String name, Output<String> id, MetastoreAssignmentState state, CustomResourceOptions options)
    resources:
      _:
        type: databricks:MetastoreAssignment
        get:
          id: ${id}
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    resource_name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    The following state arguments are supported:
    DefaultCatalogName string
    Default catalog used for this assignment; defaults to hive_metastore
    MetastoreId string
    Unique identifier of the parent Metastore
    WorkspaceId int
    id of the workspace for the assignment
    DefaultCatalogName string
    Default catalog used for this assignment; defaults to hive_metastore
    MetastoreId string
    Unique identifier of the parent Metastore
    WorkspaceId int
    id of the workspace for the assignment
    defaultCatalogName String
    Default catalog used for this assignment; defaults to hive_metastore
    metastoreId String
    Unique identifier of the parent Metastore
    workspaceId Integer
    id of the workspace for the assignment
    defaultCatalogName string
    Default catalog used for this assignment; defaults to hive_metastore
    metastoreId string
    Unique identifier of the parent Metastore
    workspaceId number
    id of the workspace for the assignment
    default_catalog_name str
    Default catalog used for this assignment; defaults to hive_metastore
    metastore_id str
    Unique identifier of the parent Metastore
    workspace_id int
    id of the workspace for the assignment
    defaultCatalogName String
    Default catalog used for this assignment; defaults to hive_metastore
    metastoreId String
    Unique identifier of the parent Metastore
    workspaceId Number
    id of the workspace for the assignment

    Package Details

    Repository
    databricks pulumi/pulumi-databricks
    License
    Apache-2.0
    Notes
    This Pulumi package is based on the databricks Terraform Provider.
    databricks logo
    Viewing docs for Databricks v0.4.0 (Older version)
    published on Monday, Mar 9, 2026 by Pulumi
      Try Pulumi Cloud free. Your team will thank you.