1. Packages
  2. Packages
  3. Databricks Provider
  4. API Docs
  5. Table
Viewing docs for Databricks v0.4.0 (Older version)
published on Monday, Mar 9, 2026 by Pulumi
databricks logo
Viewing docs for Databricks v0.4.0 (Older version)
published on Monday, Mar 9, 2026 by Pulumi

    Private Preview This feature is in Private Preview. Contact your Databricks representative to request access.

    Within a metastore, Unity Catalog provides a 3-level namespace for organizing data: Catalogs, databases (also called schemas), and tables / views.

    Note This resource has an evolving API, which will change in the upcoming versions of the provider in order to simplify user experience.

    A databricks.Table is contained within databricks_schema.

    The following resources are used in the same context:

    • databricks.Table data source to list tables within Unity Catalog.
    • databricks.Schema data source to list schemas within Unity Catalog.
    • databricks.Catalog data source to list catalogs within Unity Catalog.

    Example Usage

    using Pulumi;
    using Databricks = Pulumi.Databricks;
    
    class MyStack : Stack
    {
        public MyStack()
        {
            var sandbox = new Databricks.Catalog("sandbox", new Databricks.CatalogArgs
            {
                MetastoreId = databricks_metastore.This.Id,
                Comment = "this catalog is managed by terraform",
                Properties = 
                {
                    { "purpose", "testing" },
                },
            });
            var things = new Databricks.Schema("things", new Databricks.SchemaArgs
            {
                CatalogName = sandbox.Id,
                Comment = "this database is managed by terraform",
                Properties = 
                {
                    { "kind", "various" },
                },
            });
            var thing = new Databricks.Table("thing", new Databricks.TableArgs
            {
                CatalogName = sandbox.Id,
                SchemaName = things.Name,
                TableType = "MANAGED",
                DataSourceFormat = "DELTA",
                Columns = 
                {
                    new Databricks.Inputs.TableColumnArgs
                    {
                        Name = "id",
                        Position = 0,
                        TypeName = "INT",
                        TypeText = "int",
                        TypeJson = "{\"name\":\"id\",\"type\":\"integer\",\"nullable\":true,\"metadata\":{}}",
                    },
                    new Databricks.Inputs.TableColumnArgs
                    {
                        Name = "name",
                        Position = 1,
                        TypeName = "STRING",
                        TypeText = "varchar(64)",
                        TypeJson = "{\"name\":\"name\",\"type\":\"varchar(64)\",\"nullable\":true,\"metadata\":{}}",
                    },
                },
                Comment = "this table is managed by terraform",
            }, new CustomResourceOptions
            {
                Provider = databricks.Workspace,
            });
        }
    
    }
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		sandbox, err := databricks.NewCatalog(ctx, "sandbox", &databricks.CatalogArgs{
    			MetastoreId: pulumi.Any(databricks_metastore.This.Id),
    			Comment:     pulumi.String("this catalog is managed by terraform"),
    			Properties: pulumi.AnyMap{
    				"purpose": pulumi.Any("testing"),
    			},
    		})
    		if err != nil {
    			return err
    		}
    		things, err := databricks.NewSchema(ctx, "things", &databricks.SchemaArgs{
    			CatalogName: sandbox.ID(),
    			Comment:     pulumi.String("this database is managed by terraform"),
    			Properties: pulumi.AnyMap{
    				"kind": pulumi.Any("various"),
    			},
    		})
    		if err != nil {
    			return err
    		}
    		_, err = databricks.NewTable(ctx, "thing", &databricks.TableArgs{
    			CatalogName:      sandbox.ID(),
    			SchemaName:       things.Name,
    			TableType:        pulumi.String("MANAGED"),
    			DataSourceFormat: pulumi.String("DELTA"),
    			Columns: databricks.TableColumnArray{
    				&databricks.TableColumnArgs{
    					Name:     pulumi.String("id"),
    					Position: pulumi.Int(0),
    					TypeName: pulumi.String("INT"),
    					TypeText: pulumi.String("int"),
    					TypeJson: pulumi.String("{\"name\":\"id\",\"type\":\"integer\",\"nullable\":true,\"metadata\":{}}"),
    				},
    				&databricks.TableColumnArgs{
    					Name:     pulumi.String("name"),
    					Position: pulumi.Int(1),
    					TypeName: pulumi.String("STRING"),
    					TypeText: pulumi.String("varchar(64)"),
    					TypeJson: pulumi.String("{\"name\":\"name\",\"type\":\"varchar(64)\",\"nullable\":true,\"metadata\":{}}"),
    				},
    			},
    			Comment: pulumi.String("this table is managed by terraform"),
    		}, pulumi.Provider(databricks.Workspace))
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    

    Example coming soon!

    import * as pulumi from "@pulumi/pulumi";
    import * as databricks from "@pulumi/databricks";
    
    const sandbox = new databricks.Catalog("sandbox", {
        metastoreId: databricks_metastore["this"].id,
        comment: "this catalog is managed by terraform",
        properties: {
            purpose: "testing",
        },
    });
    const things = new databricks.Schema("things", {
        catalogName: sandbox.id,
        comment: "this database is managed by terraform",
        properties: {
            kind: "various",
        },
    });
    const thing = new databricks.Table("thing", {
        catalogName: sandbox.id,
        schemaName: things.name,
        tableType: "MANAGED",
        dataSourceFormat: "DELTA",
        columns: [
            {
                name: "id",
                position: 0,
                typeName: "INT",
                typeText: "int",
                typeJson: "{\"name\":\"id\",\"type\":\"integer\",\"nullable\":true,\"metadata\":{}}",
            },
            {
                name: "name",
                position: 1,
                typeName: "STRING",
                typeText: "varchar(64)",
                typeJson: "{\"name\":\"name\",\"type\":\"varchar(64)\",\"nullable\":true,\"metadata\":{}}",
            },
        ],
        comment: "this table is managed by terraform",
    }, {
        provider: databricks.workspace,
    });
    
    import pulumi
    import pulumi_databricks as databricks
    
    sandbox = databricks.Catalog("sandbox",
        metastore_id=databricks_metastore["this"]["id"],
        comment="this catalog is managed by terraform",
        properties={
            "purpose": "testing",
        })
    things = databricks.Schema("things",
        catalog_name=sandbox.id,
        comment="this database is managed by terraform",
        properties={
            "kind": "various",
        })
    thing = databricks.Table("thing",
        catalog_name=sandbox.id,
        schema_name=things.name,
        table_type="MANAGED",
        data_source_format="DELTA",
        columns=[
            databricks.TableColumnArgs(
                name="id",
                position=0,
                type_name="INT",
                type_text="int",
                type_json="{\"name\":\"id\",\"type\":\"integer\",\"nullable\":true,\"metadata\":{}}",
            ),
            databricks.TableColumnArgs(
                name="name",
                position=1,
                type_name="STRING",
                type_text="varchar(64)",
                type_json="{\"name\":\"name\",\"type\":\"varchar(64)\",\"nullable\":true,\"metadata\":{}}",
            ),
        ],
        comment="this table is managed by terraform",
        opts=pulumi.ResourceOptions(provider=databricks["workspace"]))
    

    Example coming soon!

    Create Table Resource

    Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.

    Constructor syntax

    new Table(name: string, args: TableArgs, opts?: CustomResourceOptions);
    @overload
    def Table(resource_name: str,
              args: TableArgs,
              opts: Optional[ResourceOptions] = None)
    
    @overload
    def Table(resource_name: str,
              opts: Optional[ResourceOptions] = None,
              catalog_name: Optional[str] = None,
              columns: Optional[Sequence[TableColumnArgs]] = None,
              data_source_format: Optional[str] = None,
              schema_name: Optional[str] = None,
              table_type: Optional[str] = None,
              comment: Optional[str] = None,
              name: Optional[str] = None,
              owner: Optional[str] = None,
              properties: Optional[Mapping[str, Any]] = None,
              storage_credential_name: Optional[str] = None,
              storage_location: Optional[str] = None,
              view_definition: Optional[str] = None)
    func NewTable(ctx *Context, name string, args TableArgs, opts ...ResourceOption) (*Table, error)
    public Table(string name, TableArgs args, CustomResourceOptions? opts = null)
    public Table(String name, TableArgs args)
    public Table(String name, TableArgs args, CustomResourceOptions options)
    
    type: databricks:Table
    properties: # The arguments to resource properties.
    options: # Bag of options to control resource's behavior.
    
    

    Parameters

    name string
    The unique name of the resource.
    args TableArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    resource_name str
    The unique name of the resource.
    args TableArgs
    The arguments to resource properties.
    opts ResourceOptions
    Bag of options to control resource's behavior.
    ctx Context
    Context object for the current deployment.
    name string
    The unique name of the resource.
    args TableArgs
    The arguments to resource properties.
    opts ResourceOption
    Bag of options to control resource's behavior.
    name string
    The unique name of the resource.
    args TableArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    name String
    The unique name of the resource.
    args TableArgs
    The arguments to resource properties.
    options CustomResourceOptions
    Bag of options to control resource's behavior.

    Constructor example

    The following reference example uses placeholder values for all input properties.

    var tableResource = new Databricks.Table("tableResource", new()
    {
        CatalogName = "string",
        Columns = new[]
        {
            new Databricks.Inputs.TableColumnArgs
            {
                Name = "string",
                Position = 0,
                TypeName = "string",
                TypeText = "string",
                Comment = "string",
                Nullable = false,
                PartitionIndex = 0,
                TypeIntervalType = "string",
                TypeJson = "string",
                TypePrecision = 0,
                TypeScale = 0,
            },
        },
        DataSourceFormat = "string",
        SchemaName = "string",
        TableType = "string",
        Comment = "string",
        Name = "string",
        Owner = "string",
        Properties = 
        {
            { "string", "any" },
        },
        StorageCredentialName = "string",
        StorageLocation = "string",
        ViewDefinition = "string",
    });
    
    example, err := databricks.NewTable(ctx, "tableResource", &databricks.TableArgs{
    	CatalogName: pulumi.String("string"),
    	Columns: databricks.TableColumnArray{
    		&databricks.TableColumnArgs{
    			Name:             pulumi.String("string"),
    			Position:         pulumi.Int(0),
    			TypeName:         pulumi.String("string"),
    			TypeText:         pulumi.String("string"),
    			Comment:          pulumi.String("string"),
    			Nullable:         pulumi.Bool(false),
    			PartitionIndex:   pulumi.Int(0),
    			TypeIntervalType: pulumi.String("string"),
    			TypeJson:         pulumi.String("string"),
    			TypePrecision:    pulumi.Int(0),
    			TypeScale:        pulumi.Int(0),
    		},
    	},
    	DataSourceFormat: pulumi.String("string"),
    	SchemaName:       pulumi.String("string"),
    	TableType:        pulumi.String("string"),
    	Comment:          pulumi.String("string"),
    	Name:             pulumi.String("string"),
    	Owner:            pulumi.String("string"),
    	Properties: pulumi.Map{
    		"string": pulumi.Any("any"),
    	},
    	StorageCredentialName: pulumi.String("string"),
    	StorageLocation:       pulumi.String("string"),
    	ViewDefinition:        pulumi.String("string"),
    })
    
    var tableResource = new Table("tableResource", TableArgs.builder()
        .catalogName("string")
        .columns(TableColumnArgs.builder()
            .name("string")
            .position(0)
            .typeName("string")
            .typeText("string")
            .comment("string")
            .nullable(false)
            .partitionIndex(0)
            .typeIntervalType("string")
            .typeJson("string")
            .typePrecision(0)
            .typeScale(0)
            .build())
        .dataSourceFormat("string")
        .schemaName("string")
        .tableType("string")
        .comment("string")
        .name("string")
        .owner("string")
        .properties(Map.of("string", "any"))
        .storageCredentialName("string")
        .storageLocation("string")
        .viewDefinition("string")
        .build());
    
    table_resource = databricks.Table("tableResource",
        catalog_name="string",
        columns=[{
            "name": "string",
            "position": 0,
            "type_name": "string",
            "type_text": "string",
            "comment": "string",
            "nullable": False,
            "partition_index": 0,
            "type_interval_type": "string",
            "type_json": "string",
            "type_precision": 0,
            "type_scale": 0,
        }],
        data_source_format="string",
        schema_name="string",
        table_type="string",
        comment="string",
        name="string",
        owner="string",
        properties={
            "string": "any",
        },
        storage_credential_name="string",
        storage_location="string",
        view_definition="string")
    
    const tableResource = new databricks.Table("tableResource", {
        catalogName: "string",
        columns: [{
            name: "string",
            position: 0,
            typeName: "string",
            typeText: "string",
            comment: "string",
            nullable: false,
            partitionIndex: 0,
            typeIntervalType: "string",
            typeJson: "string",
            typePrecision: 0,
            typeScale: 0,
        }],
        dataSourceFormat: "string",
        schemaName: "string",
        tableType: "string",
        comment: "string",
        name: "string",
        owner: "string",
        properties: {
            string: "any",
        },
        storageCredentialName: "string",
        storageLocation: "string",
        viewDefinition: "string",
    });
    
    type: databricks:Table
    properties:
        catalogName: string
        columns:
            - comment: string
              name: string
              nullable: false
              partitionIndex: 0
              position: 0
              typeIntervalType: string
              typeJson: string
              typeName: string
              typePrecision: 0
              typeScale: 0
              typeText: string
        comment: string
        dataSourceFormat: string
        name: string
        owner: string
        properties:
            string: any
        schemaName: string
        storageCredentialName: string
        storageLocation: string
        tableType: string
        viewDefinition: string
    

    Table Resource Properties

    To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

    Inputs

    In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.

    The Table resource accepts the following input properties:

    CatalogName string
    Name of parent catalog
    Columns List<TableColumn>
    DataSourceFormat string
    External tables are supported in multiple data source formats. The string constants identifying these formats are DELTA, CSV, JSON, AVRO, PARQUET, ORC, TEXT
    SchemaName string
    Name of parent Schema relative to parent Catalog
    TableType string
    Distinguishes a view vs. managed/external Table. MANAGED, EXTERNAL or VIEW
    Comment string
    User-supplied free-form text.
    Name string
    User-visible name of column
    Owner string
    Username/groupname of Table owner. Currently this field can only be changed after the resource is created.
    Properties Dictionary<string, object>
    Extensible Table properties.
    StorageCredentialName string
    For EXTERNAL Tables only: the name of storage credential to use. This cannot be updated
    StorageLocation string
    URL of storage location for Table data (required for EXTERNAL Tables. For Managed Tables, if the path is provided it needs to be a Staging Table path that has been generated through the Staging Table API, otherwise should be empty)
    ViewDefinition string
    SQL text defining the view (for table_type == "VIEW")
    CatalogName string
    Name of parent catalog
    Columns []TableColumnArgs
    DataSourceFormat string
    External tables are supported in multiple data source formats. The string constants identifying these formats are DELTA, CSV, JSON, AVRO, PARQUET, ORC, TEXT
    SchemaName string
    Name of parent Schema relative to parent Catalog
    TableType string
    Distinguishes a view vs. managed/external Table. MANAGED, EXTERNAL or VIEW
    Comment string
    User-supplied free-form text.
    Name string
    User-visible name of column
    Owner string
    Username/groupname of Table owner. Currently this field can only be changed after the resource is created.
    Properties map[string]interface{}
    Extensible Table properties.
    StorageCredentialName string
    For EXTERNAL Tables only: the name of storage credential to use. This cannot be updated
    StorageLocation string
    URL of storage location for Table data (required for EXTERNAL Tables. For Managed Tables, if the path is provided it needs to be a Staging Table path that has been generated through the Staging Table API, otherwise should be empty)
    ViewDefinition string
    SQL text defining the view (for table_type == "VIEW")
    catalogName String
    Name of parent catalog
    columns List<TableColumn>
    dataSourceFormat String
    External tables are supported in multiple data source formats. The string constants identifying these formats are DELTA, CSV, JSON, AVRO, PARQUET, ORC, TEXT
    schemaName String
    Name of parent Schema relative to parent Catalog
    tableType String
    Distinguishes a view vs. managed/external Table. MANAGED, EXTERNAL or VIEW
    comment String
    User-supplied free-form text.
    name String
    User-visible name of column
    owner String
    Username/groupname of Table owner. Currently this field can only be changed after the resource is created.
    properties Map<String,Object>
    Extensible Table properties.
    storageCredentialName String
    For EXTERNAL Tables only: the name of storage credential to use. This cannot be updated
    storageLocation String
    URL of storage location for Table data (required for EXTERNAL Tables. For Managed Tables, if the path is provided it needs to be a Staging Table path that has been generated through the Staging Table API, otherwise should be empty)
    viewDefinition String
    SQL text defining the view (for table_type == "VIEW")
    catalogName string
    Name of parent catalog
    columns TableColumn[]
    dataSourceFormat string
    External tables are supported in multiple data source formats. The string constants identifying these formats are DELTA, CSV, JSON, AVRO, PARQUET, ORC, TEXT
    schemaName string
    Name of parent Schema relative to parent Catalog
    tableType string
    Distinguishes a view vs. managed/external Table. MANAGED, EXTERNAL or VIEW
    comment string
    User-supplied free-form text.
    name string
    User-visible name of column
    owner string
    Username/groupname of Table owner. Currently this field can only be changed after the resource is created.
    properties {[key: string]: any}
    Extensible Table properties.
    storageCredentialName string
    For EXTERNAL Tables only: the name of storage credential to use. This cannot be updated
    storageLocation string
    URL of storage location for Table data (required for EXTERNAL Tables. For Managed Tables, if the path is provided it needs to be a Staging Table path that has been generated through the Staging Table API, otherwise should be empty)
    viewDefinition string
    SQL text defining the view (for table_type == "VIEW")
    catalog_name str
    Name of parent catalog
    columns Sequence[TableColumnArgs]
    data_source_format str
    External tables are supported in multiple data source formats. The string constants identifying these formats are DELTA, CSV, JSON, AVRO, PARQUET, ORC, TEXT
    schema_name str
    Name of parent Schema relative to parent Catalog
    table_type str
    Distinguishes a view vs. managed/external Table. MANAGED, EXTERNAL or VIEW
    comment str
    User-supplied free-form text.
    name str
    User-visible name of column
    owner str
    Username/groupname of Table owner. Currently this field can only be changed after the resource is created.
    properties Mapping[str, Any]
    Extensible Table properties.
    storage_credential_name str
    For EXTERNAL Tables only: the name of storage credential to use. This cannot be updated
    storage_location str
    URL of storage location for Table data (required for EXTERNAL Tables. For Managed Tables, if the path is provided it needs to be a Staging Table path that has been generated through the Staging Table API, otherwise should be empty)
    view_definition str
    SQL text defining the view (for table_type == "VIEW")
    catalogName String
    Name of parent catalog
    columns List<Property Map>
    dataSourceFormat String
    External tables are supported in multiple data source formats. The string constants identifying these formats are DELTA, CSV, JSON, AVRO, PARQUET, ORC, TEXT
    schemaName String
    Name of parent Schema relative to parent Catalog
    tableType String
    Distinguishes a view vs. managed/external Table. MANAGED, EXTERNAL or VIEW
    comment String
    User-supplied free-form text.
    name String
    User-visible name of column
    owner String
    Username/groupname of Table owner. Currently this field can only be changed after the resource is created.
    properties Map<Any>
    Extensible Table properties.
    storageCredentialName String
    For EXTERNAL Tables only: the name of storage credential to use. This cannot be updated
    storageLocation String
    URL of storage location for Table data (required for EXTERNAL Tables. For Managed Tables, if the path is provided it needs to be a Staging Table path that has been generated through the Staging Table API, otherwise should be empty)
    viewDefinition String
    SQL text defining the view (for table_type == "VIEW")

    Outputs

    All input properties are implicitly available as output properties. Additionally, the Table resource produces the following output properties:

    Id string
    The provider-assigned unique ID for this managed resource.
    Id string
    The provider-assigned unique ID for this managed resource.
    id String
    The provider-assigned unique ID for this managed resource.
    id string
    The provider-assigned unique ID for this managed resource.
    id str
    The provider-assigned unique ID for this managed resource.
    id String
    The provider-assigned unique ID for this managed resource.

    Look up Existing Table Resource

    Get an existing Table resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

    public static get(name: string, id: Input<ID>, state?: TableState, opts?: CustomResourceOptions): Table
    @staticmethod
    def get(resource_name: str,
            id: str,
            opts: Optional[ResourceOptions] = None,
            catalog_name: Optional[str] = None,
            columns: Optional[Sequence[TableColumnArgs]] = None,
            comment: Optional[str] = None,
            data_source_format: Optional[str] = None,
            name: Optional[str] = None,
            owner: Optional[str] = None,
            properties: Optional[Mapping[str, Any]] = None,
            schema_name: Optional[str] = None,
            storage_credential_name: Optional[str] = None,
            storage_location: Optional[str] = None,
            table_type: Optional[str] = None,
            view_definition: Optional[str] = None) -> Table
    func GetTable(ctx *Context, name string, id IDInput, state *TableState, opts ...ResourceOption) (*Table, error)
    public static Table Get(string name, Input<string> id, TableState? state, CustomResourceOptions? opts = null)
    public static Table get(String name, Output<String> id, TableState state, CustomResourceOptions options)
    resources:
      _:
        type: databricks:Table
        get:
          id: ${id}
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    resource_name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    The following state arguments are supported:
    CatalogName string
    Name of parent catalog
    Columns List<TableColumn>
    Comment string
    User-supplied free-form text.
    DataSourceFormat string
    External tables are supported in multiple data source formats. The string constants identifying these formats are DELTA, CSV, JSON, AVRO, PARQUET, ORC, TEXT
    Name string
    User-visible name of column
    Owner string
    Username/groupname of Table owner. Currently this field can only be changed after the resource is created.
    Properties Dictionary<string, object>
    Extensible Table properties.
    SchemaName string
    Name of parent Schema relative to parent Catalog
    StorageCredentialName string
    For EXTERNAL Tables only: the name of storage credential to use. This cannot be updated
    StorageLocation string
    URL of storage location for Table data (required for EXTERNAL Tables. For Managed Tables, if the path is provided it needs to be a Staging Table path that has been generated through the Staging Table API, otherwise should be empty)
    TableType string
    Distinguishes a view vs. managed/external Table. MANAGED, EXTERNAL or VIEW
    ViewDefinition string
    SQL text defining the view (for table_type == "VIEW")
    CatalogName string
    Name of parent catalog
    Columns []TableColumnArgs
    Comment string
    User-supplied free-form text.
    DataSourceFormat string
    External tables are supported in multiple data source formats. The string constants identifying these formats are DELTA, CSV, JSON, AVRO, PARQUET, ORC, TEXT
    Name string
    User-visible name of column
    Owner string
    Username/groupname of Table owner. Currently this field can only be changed after the resource is created.
    Properties map[string]interface{}
    Extensible Table properties.
    SchemaName string
    Name of parent Schema relative to parent Catalog
    StorageCredentialName string
    For EXTERNAL Tables only: the name of storage credential to use. This cannot be updated
    StorageLocation string
    URL of storage location for Table data (required for EXTERNAL Tables. For Managed Tables, if the path is provided it needs to be a Staging Table path that has been generated through the Staging Table API, otherwise should be empty)
    TableType string
    Distinguishes a view vs. managed/external Table. MANAGED, EXTERNAL or VIEW
    ViewDefinition string
    SQL text defining the view (for table_type == "VIEW")
    catalogName String
    Name of parent catalog
    columns List<TableColumn>
    comment String
    User-supplied free-form text.
    dataSourceFormat String
    External tables are supported in multiple data source formats. The string constants identifying these formats are DELTA, CSV, JSON, AVRO, PARQUET, ORC, TEXT
    name String
    User-visible name of column
    owner String
    Username/groupname of Table owner. Currently this field can only be changed after the resource is created.
    properties Map<String,Object>
    Extensible Table properties.
    schemaName String
    Name of parent Schema relative to parent Catalog
    storageCredentialName String
    For EXTERNAL Tables only: the name of storage credential to use. This cannot be updated
    storageLocation String
    URL of storage location for Table data (required for EXTERNAL Tables. For Managed Tables, if the path is provided it needs to be a Staging Table path that has been generated through the Staging Table API, otherwise should be empty)
    tableType String
    Distinguishes a view vs. managed/external Table. MANAGED, EXTERNAL or VIEW
    viewDefinition String
    SQL text defining the view (for table_type == "VIEW")
    catalogName string
    Name of parent catalog
    columns TableColumn[]
    comment string
    User-supplied free-form text.
    dataSourceFormat string
    External tables are supported in multiple data source formats. The string constants identifying these formats are DELTA, CSV, JSON, AVRO, PARQUET, ORC, TEXT
    name string
    User-visible name of column
    owner string
    Username/groupname of Table owner. Currently this field can only be changed after the resource is created.
    properties {[key: string]: any}
    Extensible Table properties.
    schemaName string
    Name of parent Schema relative to parent Catalog
    storageCredentialName string
    For EXTERNAL Tables only: the name of storage credential to use. This cannot be updated
    storageLocation string
    URL of storage location for Table data (required for EXTERNAL Tables. For Managed Tables, if the path is provided it needs to be a Staging Table path that has been generated through the Staging Table API, otherwise should be empty)
    tableType string
    Distinguishes a view vs. managed/external Table. MANAGED, EXTERNAL or VIEW
    viewDefinition string
    SQL text defining the view (for table_type == "VIEW")
    catalog_name str
    Name of parent catalog
    columns Sequence[TableColumnArgs]
    comment str
    User-supplied free-form text.
    data_source_format str
    External tables are supported in multiple data source formats. The string constants identifying these formats are DELTA, CSV, JSON, AVRO, PARQUET, ORC, TEXT
    name str
    User-visible name of column
    owner str
    Username/groupname of Table owner. Currently this field can only be changed after the resource is created.
    properties Mapping[str, Any]
    Extensible Table properties.
    schema_name str
    Name of parent Schema relative to parent Catalog
    storage_credential_name str
    For EXTERNAL Tables only: the name of storage credential to use. This cannot be updated
    storage_location str
    URL of storage location for Table data (required for EXTERNAL Tables. For Managed Tables, if the path is provided it needs to be a Staging Table path that has been generated through the Staging Table API, otherwise should be empty)
    table_type str
    Distinguishes a view vs. managed/external Table. MANAGED, EXTERNAL or VIEW
    view_definition str
    SQL text defining the view (for table_type == "VIEW")
    catalogName String
    Name of parent catalog
    columns List<Property Map>
    comment String
    User-supplied free-form text.
    dataSourceFormat String
    External tables are supported in multiple data source formats. The string constants identifying these formats are DELTA, CSV, JSON, AVRO, PARQUET, ORC, TEXT
    name String
    User-visible name of column
    owner String
    Username/groupname of Table owner. Currently this field can only be changed after the resource is created.
    properties Map<Any>
    Extensible Table properties.
    schemaName String
    Name of parent Schema relative to parent Catalog
    storageCredentialName String
    For EXTERNAL Tables only: the name of storage credential to use. This cannot be updated
    storageLocation String
    URL of storage location for Table data (required for EXTERNAL Tables. For Managed Tables, if the path is provided it needs to be a Staging Table path that has been generated through the Staging Table API, otherwise should be empty)
    tableType String
    Distinguishes a view vs. managed/external Table. MANAGED, EXTERNAL or VIEW
    viewDefinition String
    SQL text defining the view (for table_type == "VIEW")

    Supporting Types

    TableColumn, TableColumnArgs

    Name string
    User-visible name of column
    Position int
    Ordinal position of column, starting at 0.
    TypeName string
    Name of (outer) type
    TypeText string
    Column type spec (with metadata) as SQL text
    Comment string
    User-supplied free-form text.
    Nullable bool
    Whether field is nullable (Default: true)
    PartitionIndex int
    Partition ID
    TypeIntervalType string
    Format of INTERVAL columns
    TypeJson string
    Column type spec (with metadata) as JSON string
    TypePrecision int
    Digits of precision; applies to DECIMAL columns
    TypeScale int
    Digits to right of decimal; applies to DECIMAL columns
    Name string
    User-visible name of column
    Position int
    Ordinal position of column, starting at 0.
    TypeName string
    Name of (outer) type
    TypeText string
    Column type spec (with metadata) as SQL text
    Comment string
    User-supplied free-form text.
    Nullable bool
    Whether field is nullable (Default: true)
    PartitionIndex int
    Partition ID
    TypeIntervalType string
    Format of INTERVAL columns
    TypeJson string
    Column type spec (with metadata) as JSON string
    TypePrecision int
    Digits of precision; applies to DECIMAL columns
    TypeScale int
    Digits to right of decimal; applies to DECIMAL columns
    name String
    User-visible name of column
    position Integer
    Ordinal position of column, starting at 0.
    typeName String
    Name of (outer) type
    typeText String
    Column type spec (with metadata) as SQL text
    comment String
    User-supplied free-form text.
    nullable Boolean
    Whether field is nullable (Default: true)
    partitionIndex Integer
    Partition ID
    typeIntervalType String
    Format of INTERVAL columns
    typeJson String
    Column type spec (with metadata) as JSON string
    typePrecision Integer
    Digits of precision; applies to DECIMAL columns
    typeScale Integer
    Digits to right of decimal; applies to DECIMAL columns
    name string
    User-visible name of column
    position number
    Ordinal position of column, starting at 0.
    typeName string
    Name of (outer) type
    typeText string
    Column type spec (with metadata) as SQL text
    comment string
    User-supplied free-form text.
    nullable boolean
    Whether field is nullable (Default: true)
    partitionIndex number
    Partition ID
    typeIntervalType string
    Format of INTERVAL columns
    typeJson string
    Column type spec (with metadata) as JSON string
    typePrecision number
    Digits of precision; applies to DECIMAL columns
    typeScale number
    Digits to right of decimal; applies to DECIMAL columns
    name str
    User-visible name of column
    position int
    Ordinal position of column, starting at 0.
    type_name str
    Name of (outer) type
    type_text str
    Column type spec (with metadata) as SQL text
    comment str
    User-supplied free-form text.
    nullable bool
    Whether field is nullable (Default: true)
    partition_index int
    Partition ID
    type_interval_type str
    Format of INTERVAL columns
    type_json str
    Column type spec (with metadata) as JSON string
    type_precision int
    Digits of precision; applies to DECIMAL columns
    type_scale int
    Digits to right of decimal; applies to DECIMAL columns
    name String
    User-visible name of column
    position Number
    Ordinal position of column, starting at 0.
    typeName String
    Name of (outer) type
    typeText String
    Column type spec (with metadata) as SQL text
    comment String
    User-supplied free-form text.
    nullable Boolean
    Whether field is nullable (Default: true)
    partitionIndex Number
    Partition ID
    typeIntervalType String
    Format of INTERVAL columns
    typeJson String
    Column type spec (with metadata) as JSON string
    typePrecision Number
    Digits of precision; applies to DECIMAL columns
    typeScale Number
    Digits to right of decimal; applies to DECIMAL columns

    Import

    This resource can be imported by its full name in the form `catalog.schema.table`:

     $ pulumi import databricks:index/table:Table this <full-name>
    

    To learn more about importing existing cloud resources, see Importing resources.

    Package Details

    Repository
    databricks pulumi/pulumi-databricks
    License
    Apache-2.0
    Notes
    This Pulumi package is based on the databricks Terraform Provider.
    databricks logo
    Viewing docs for Databricks v0.4.0 (Older version)
    published on Monday, Mar 9, 2026 by Pulumi
      Try Pulumi Cloud free. Your team will thank you.