databricks.getSqlWarehouse

Databricks v1.27.0 published on Tuesday, Dec 5, 2023 by Pulumi

    Note: If you have a fully automated setup with workspaces created by databricks_mws_workspaces, please make sure to add the depends_on attribute in order to prevent default auth: cannot configure default credentials errors.

    Retrieves information about a Databricks SQL warehouse using its id. The id can be retrieved programmatically using the databricks.getSqlWarehouses data source.

    The following resources are often used in the same context:

    • End to end workspace management guide.
    • databricks.InstanceProfile to manage AWS EC2 instance profiles that users can use to launch a databricks.Cluster and access data, like databricks_mount.
    • databricks.SqlDashboard to manage Databricks SQL Dashboards.
    • databricks.SqlGlobalConfig to configure the security policy, databricks_instance_profile, and data access properties for all SQL warehouses of a workspace.
    • databricks.SqlPermissions to manage data object access control lists in Databricks workspaces for things like tables, views, databases, and more.

    Example Usage

    Retrieve attributes of each SQL warehouse in a workspace

    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Databricks = Pulumi.Databricks;
    
    return await Deployment.RunAsync(() => 
    {
        var allSqlWarehouses = Databricks.GetSqlWarehouses.Invoke();
    
    // Look up each warehouse by its id returned from GetSqlWarehouses.
    var allSqlWarehouse = allSqlWarehouses.Apply(warehouses => warehouses.Ids.Select(id =>
    {
        return Databricks.GetSqlWarehouse.Invoke(new()
        {
            Id = id,
        });
    }).ToList());
    
    });
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		allSqlWarehouses, err := databricks.GetSqlWarehouses(ctx, nil, nil)
    		if err != nil {
    			return err
    		}
    		// Look up each warehouse by its id returned from GetSqlWarehouses.
    		for _, id := range allSqlWarehouses.Ids {
    			if _, err := databricks.GetSqlWarehouse(ctx, &databricks.GetSqlWarehouseArgs{
    				Id: pulumi.StringRef(id),
    			}, nil); err != nil {
    				return err
    			}
    		}
    		return nil
    	})
    }
    

    Coming soon!

    import pulumi
    import pulumi_databricks as databricks
    
    all_sql_warehouses = databricks.get_sql_warehouses()
    # Look up each warehouse by its id returned from get_sql_warehouses.
    all_sql_warehouse = [databricks.get_sql_warehouse(id=warehouse_id) for warehouse_id in all_sql_warehouses.ids]
    
    import * as pulumi from "@pulumi/pulumi";
    import * as databricks from "@pulumi/databricks";
    
    const allSqlWarehouses = databricks.getSqlWarehouses({});
    // Look up each warehouse by its id returned from getSqlWarehouses.
    const allSqlWarehouse = allSqlWarehouses.then(warehouses =>
        warehouses.ids.map(id => databricks.getSqlWarehouse({ id: id })));
    

    Coming soon!

    Search for a specific SQL Warehouse by name

    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Databricks = Pulumi.Databricks;
    
    return await Deployment.RunAsync(() => 
    {
        var all = Databricks.GetSqlWarehouse.Invoke(new()
        {
            Name = "Starter Warehouse",
        });
    
    });
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		_, err := databricks.GetSqlWarehouse(ctx, &databricks.GetSqlWarehouseArgs{
    			Name: pulumi.StringRef("Starter Warehouse"),
    		}, nil)
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.databricks.DatabricksFunctions;
    import com.pulumi.databricks.inputs.GetSqlWarehouseArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            final var all = DatabricksFunctions.getSqlWarehouse(GetSqlWarehouseArgs.builder()
                .name("Starter Warehouse")
                .build());
    
        }
    }
    
    import pulumi
    import pulumi_databricks as databricks
    
    all = databricks.get_sql_warehouse(name="Starter Warehouse")
    
    import * as pulumi from "@pulumi/pulumi";
    import * as databricks from "@pulumi/databricks";
    
    const all = databricks.getSqlWarehouse({
        name: "Starter Warehouse",
    });
    
    variables:
      all:
        fn::invoke:
          Function: databricks:getSqlWarehouse
          Arguments:
            name: Starter Warehouse
    

    Using getSqlWarehouse

    Two invocation forms are available. The direct form accepts plain arguments and either blocks until the result value is available, or returns a Promise-wrapped result. The output form accepts Input-wrapped arguments and returns an Output-wrapped result.

    function getSqlWarehouse(args: GetSqlWarehouseArgs, opts?: InvokeOptions): Promise<GetSqlWarehouseResult>
    function getSqlWarehouseOutput(args: GetSqlWarehouseOutputArgs, opts?: InvokeOptions): Output<GetSqlWarehouseResult>
    def get_sql_warehouse(auto_stop_mins: Optional[int] = None,
                          channel: Optional[GetSqlWarehouseChannel] = None,
                          cluster_size: Optional[str] = None,
                          data_source_id: Optional[str] = None,
                          enable_photon: Optional[bool] = None,
                          enable_serverless_compute: Optional[bool] = None,
                          id: Optional[str] = None,
                          instance_profile_arn: Optional[str] = None,
                          jdbc_url: Optional[str] = None,
                          max_num_clusters: Optional[int] = None,
                          min_num_clusters: Optional[int] = None,
                          name: Optional[str] = None,
                          num_clusters: Optional[int] = None,
                          odbc_params: Optional[GetSqlWarehouseOdbcParams] = None,
                          spot_instance_policy: Optional[str] = None,
                          state: Optional[str] = None,
                          tags: Optional[GetSqlWarehouseTags] = None,
                          opts: Optional[InvokeOptions] = None) -> GetSqlWarehouseResult
    def get_sql_warehouse_output(auto_stop_mins: Optional[pulumi.Input[int]] = None,
                          channel: Optional[pulumi.Input[GetSqlWarehouseChannelArgs]] = None,
                          cluster_size: Optional[pulumi.Input[str]] = None,
                          data_source_id: Optional[pulumi.Input[str]] = None,
                          enable_photon: Optional[pulumi.Input[bool]] = None,
                          enable_serverless_compute: Optional[pulumi.Input[bool]] = None,
                          id: Optional[pulumi.Input[str]] = None,
                          instance_profile_arn: Optional[pulumi.Input[str]] = None,
                          jdbc_url: Optional[pulumi.Input[str]] = None,
                          max_num_clusters: Optional[pulumi.Input[int]] = None,
                          min_num_clusters: Optional[pulumi.Input[int]] = None,
                          name: Optional[pulumi.Input[str]] = None,
                          num_clusters: Optional[pulumi.Input[int]] = None,
                          odbc_params: Optional[pulumi.Input[GetSqlWarehouseOdbcParamsArgs]] = None,
                          spot_instance_policy: Optional[pulumi.Input[str]] = None,
                          state: Optional[pulumi.Input[str]] = None,
                          tags: Optional[pulumi.Input[GetSqlWarehouseTagsArgs]] = None,
                          opts: Optional[InvokeOptions] = None) -> Output[GetSqlWarehouseResult]
    func GetSqlWarehouse(ctx *Context, args *GetSqlWarehouseArgs, opts ...InvokeOption) (*GetSqlWarehouseResult, error)
    func GetSqlWarehouseOutput(ctx *Context, args *GetSqlWarehouseOutputArgs, opts ...InvokeOption) GetSqlWarehouseResultOutput

    > Note: This function is named GetSqlWarehouse in the Go SDK.

    public static class GetSqlWarehouse 
    {
        public static Task<GetSqlWarehouseResult> InvokeAsync(GetSqlWarehouseArgs args, InvokeOptions? opts = null)
        public static Output<GetSqlWarehouseResult> Invoke(GetSqlWarehouseInvokeArgs args, InvokeOptions? opts = null)
    }
    public static CompletableFuture<GetSqlWarehouseResult> getSqlWarehouse(GetSqlWarehouseArgs args, InvokeOptions options)
    // Output-based functions aren't available in Java yet
    
    fn::invoke:
      function: databricks:index/getSqlWarehouse:getSqlWarehouse
      arguments:
        # arguments dictionary
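
    To illustrate the difference between the two forms, here is a minimal sketch (the exported names and the "Starter Warehouse" name are illustrative only): the direct form returns a Promise-wrapped result, while the output form accepts Input-wrapped arguments and composes with other Pulumi outputs.

    import * as pulumi from "@pulumi/pulumi";
    import * as databricks from "@pulumi/databricks";

    // Direct form: plain arguments, Promise-wrapped result.
    const direct = databricks.getSqlWarehouse({ name: "Starter Warehouse" });
    export const directJdbcUrl = direct.then(w => w.jdbcUrl);

    // Output form: Input-wrapped arguments, Output-wrapped result, so the lookup
    // can be driven by values that are only known at deploy time.
    const warehouseName = pulumi.output("Starter Warehouse");
    const wrapped = databricks.getSqlWarehouseOutput({ name: warehouseName });
    export const wrappedJdbcUrl = wrapped.jdbcUrl;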

    The following arguments are supported:

    AutoStopMins int

    Time in minutes until an idle SQL warehouse terminates all clusters and stops.

    Channel GetSqlWarehouseChannel

    Block consisting of the following fields:

    ClusterSize string

    The size of the clusters allocated to the warehouse: "2X-Small", "X-Small", "Small", "Medium", "Large", "X-Large", "2X-Large", "3X-Large", "4X-Large".

    DataSourceId string

    ID of the data source for this warehouse. This is used to bind a Databricks SQL query to a warehouse.

    EnablePhoton bool

    Whether Photon is enabled.

    EnableServerlessCompute bool

    Whether this SQL warehouse is a serverless SQL warehouse.

    Id string

    The ID of the SQL warehouse.

    InstanceProfileArn string
    JdbcUrl string

    JDBC connection string.

    MaxNumClusters int

    Maximum number of clusters available when a SQL warehouse is running.

    MinNumClusters int

    Minimum number of clusters available when a SQL warehouse is running.

    Name string

    Name of the SQL warehouse to search (case-sensitive).

    NumClusters int
    OdbcParams GetSqlWarehouseOdbcParams

    ODBC connection params: odbc_params.hostname, odbc_params.path, odbc_params.protocol, and odbc_params.port.

    SpotInstancePolicy string

    The spot policy to use for allocating instances to clusters: COST_OPTIMIZED or RELIABILITY_OPTIMIZED.

    State string
    Tags GetSqlWarehouseTags

    tags used for SQL warehouse resources.

    AutoStopMins int

    Time in minutes until an idle SQL warehouse terminates all clusters and stops.

    Channel GetSqlWarehouseChannel

    Block consisting of the following fields:

    ClusterSize string

    The size of the clusters allocated to the warehouse: "2X-Small", "X-Small", "Small", "Medium", "Large", "X-Large", "2X-Large", "3X-Large", "4X-Large".

    DataSourceId string

    ID of the data source for this warehouse. This is used to bind a Databricks SQL query to a warehouse.

    EnablePhoton bool

    Whether Photon is enabled.

    EnableServerlessCompute bool

    Whether this SQL warehouse is a serverless SQL warehouse.

    Id string

    The ID of the SQL warehouse.

    InstanceProfileArn string
    JdbcUrl string

    JDBC connection string.

    MaxNumClusters int

    Maximum number of clusters available when a SQL warehouse is running.

    MinNumClusters int

    Minimum number of clusters available when a SQL warehouse is running.

    Name string

    Name of the SQL warehouse to search (case-sensitive).

    NumClusters int
    OdbcParams GetSqlWarehouseOdbcParams

    ODBC connection params: odbc_params.hostname, odbc_params.path, odbc_params.protocol, and odbc_params.port.

    SpotInstancePolicy string

    The spot policy to use for allocating instances to clusters: COST_OPTIMIZED or RELIABILITY_OPTIMIZED.

    State string
    Tags GetSqlWarehouseTags

    tags used for SQL warehouse resources.

    autoStopMins Integer

    Time in minutes until an idle SQL warehouse terminates all clusters and stops.

    channel GetSqlWarehouseChannel

    Block consisting of the following fields:

    clusterSize String

    The size of the clusters allocated to the warehouse: "2X-Small", "X-Small", "Small", "Medium", "Large", "X-Large", "2X-Large", "3X-Large", "4X-Large".

    dataSourceId String

    ID of the data source for this warehouse. This is used to bind a Databricks SQL query to a warehouse.

    enablePhoton Boolean

    Whether Photon is enabled.

    enableServerlessCompute Boolean

    Whether this SQL warehouse is a serverless SQL warehouse.

    id String

    The ID of the SQL warehouse.

    instanceProfileArn String
    jdbcUrl String

    JDBC connection string.

    maxNumClusters Integer

    Maximum number of clusters available when a SQL warehouse is running.

    minNumClusters Integer

    Minimum number of clusters available when a SQL warehouse is running.

    name String

    Name of the SQL warehouse to search (case-sensitive).

    numClusters Integer
    odbcParams GetSqlWarehouseOdbcParams

    ODBC connection params: odbc_params.hostname, odbc_params.path, odbc_params.protocol, and odbc_params.port.

    spotInstancePolicy String

    The spot policy to use for allocating instances to clusters: COST_OPTIMIZED or RELIABILITY_OPTIMIZED.

    state String
    tags GetSqlWarehouseTags

    tags used for SQL warehouse resources.

    autoStopMins number

    Time in minutes until an idle SQL warehouse terminates all clusters and stops.

    channel GetSqlWarehouseChannel

    Block consisting of the following fields:

    clusterSize string

    The size of the clusters allocated to the warehouse: "2X-Small", "X-Small", "Small", "Medium", "Large", "X-Large", "2X-Large", "3X-Large", "4X-Large".

    dataSourceId string

    ID of the data source for this warehouse. This is used to bind a Databricks SQL query to a warehouse.

    enablePhoton boolean

    Whether Photon is enabled.

    enableServerlessCompute boolean

    Whether this SQL warehouse is a serverless SQL warehouse.

    id string

    The ID of the SQL warehouse.

    instanceProfileArn string
    jdbcUrl string

    JDBC connection string.

    maxNumClusters number

    Maximum number of clusters available when a SQL warehouse is running.

    minNumClusters number

    Minimum number of clusters available when a SQL warehouse is running.

    name string

    Name of the SQL warehouse to search (case-sensitive).

    numClusters number
    odbcParams GetSqlWarehouseOdbcParams

    ODBC connection params: odbc_params.hostname, odbc_params.path, odbc_params.protocol, and odbc_params.port.

    spotInstancePolicy string

    The spot policy to use for allocating instances to clusters: COST_OPTIMIZED or RELIABILITY_OPTIMIZED.

    state string
    tags GetSqlWarehouseTags

    tags used for SQL warehouse resources.

    auto_stop_mins int

    Time in minutes until an idle SQL warehouse terminates all clusters and stops.

    channel GetSqlWarehouseChannel

    Block consisting of the following fields:

    cluster_size str

    The size of the clusters allocated to the warehouse: "2X-Small", "X-Small", "Small", "Medium", "Large", "X-Large", "2X-Large", "3X-Large", "4X-Large".

    data_source_id str

    ID of the data source for this warehouse. This is used to bind a Databricks SQL query to a warehouse.

    enable_photon bool

    Whether Photon is enabled.

    enable_serverless_compute bool

    Whether this SQL warehouse is a serverless SQL warehouse.

    id str

    The ID of the SQL warehouse.

    instance_profile_arn str
    jdbc_url str

    JDBC connection string.

    max_num_clusters int

    Maximum number of clusters available when a SQL warehouse is running.

    min_num_clusters int

    Minimum number of clusters available when a SQL warehouse is running.

    name str

    Name of the SQL warehouse to search (case-sensitive).

    num_clusters int
    odbc_params GetSqlWarehouseOdbcParams

    ODBC connection params: odbc_params.hostname, odbc_params.path, odbc_params.protocol, and odbc_params.port.

    spot_instance_policy str

    The spot policy to use for allocating instances to clusters: COST_OPTIMIZED or RELIABILITY_OPTIMIZED.

    state str
    tags GetSqlWarehouseTags

    tags used for SQL warehouse resources.

    autoStopMins Number

    Time in minutes until an idle SQL warehouse terminates all clusters and stops.

    channel Property Map

    Block consisting of the following fields:

    clusterSize String

    The size of the clusters allocated to the warehouse: "2X-Small", "X-Small", "Small", "Medium", "Large", "X-Large", "2X-Large", "3X-Large", "4X-Large".

    dataSourceId String

    ID of the data source for this warehouse. This is used to bind a Databricks SQL query to a warehouse.

    enablePhoton Boolean

    Whether Photon is enabled.

    enableServerlessCompute Boolean

    Whether this SQL warehouse is a serverless SQL warehouse.

    id String

    The ID of the SQL warehouse.

    instanceProfileArn String
    jdbcUrl String

    JDBC connection string.

    maxNumClusters Number

    Maximum number of clusters available when a SQL warehouse is running.

    minNumClusters Number

    Minimum number of clusters available when a SQL warehouse is running.

    name String

    Name of the SQL warehouse to search (case-sensitive).

    numClusters Number
    odbcParams Property Map

    ODBC connection params: odbc_params.hostname, odbc_params.path, odbc_params.protocol, and odbc_params.port.

    spotInstancePolicy String

    The spot policy to use for allocating instances to clusters: COST_OPTIMIZED or RELIABILITY_OPTIMIZED.

    state String
    tags Property Map

    tags used for SQL warehouse resources.

    getSqlWarehouse Result

    The following output properties are available:

    AutoStopMins int

    Time in minutes until an idle SQL warehouse terminates all clusters and stops.

    Channel GetSqlWarehouseChannel

    Block consisting of the following fields:

    ClusterSize string

    The size of the clusters allocated to the warehouse: "2X-Small", "X-Small", "Small", "Medium", "Large", "X-Large", "2X-Large", "3X-Large", "4X-Large".

    DataSourceId string

    ID of the data source for this warehouse. This is used to bind a Databricks SQL query to a warehouse.

    EnablePhoton bool

    Whether Photon is enabled.

    EnableServerlessCompute bool

    Whether this SQL warehouse is a serverless SQL warehouse.

    Id string

    The ID of the SQL warehouse.

    InstanceProfileArn string
    JdbcUrl string

    JDBC connection string.

    MaxNumClusters int

    Maximum number of clusters available when a SQL warehouse is running.

    MinNumClusters int

    Minimum number of clusters available when a SQL warehouse is running.

    Name string

    Name of the SQL warehouse.

    NumClusters int
    OdbcParams GetSqlWarehouseOdbcParams

    ODBC connection params: odbc_params.hostname, odbc_params.path, odbc_params.protocol, and odbc_params.port.

    SpotInstancePolicy string

    The spot policy to use for allocating instances to clusters: COST_OPTIMIZED or RELIABILITY_OPTIMIZED.

    State string
    Tags GetSqlWarehouseTags

    tags used for SQL warehouse resources.

    AutoStopMins int

    Time in minutes until an idle SQL warehouse terminates all clusters and stops.

    Channel GetSqlWarehouseChannel

    Block consisting of the following fields:

    ClusterSize string

    The size of the clusters allocated to the warehouse: "2X-Small", "X-Small", "Small", "Medium", "Large", "X-Large", "2X-Large", "3X-Large", "4X-Large".

    DataSourceId string

    ID of the data source for this warehouse. This is used to bind a Databricks SQL query to a warehouse.

    EnablePhoton bool

    Whether Photon is enabled.

    EnableServerlessCompute bool

    Whether this SQL warehouse is a serverless SQL warehouse.

    Id string

    The ID of the SQL warehouse.

    InstanceProfileArn string
    JdbcUrl string

    JDBC connection string.

    MaxNumClusters int

    Maximum number of clusters available when a SQL warehouse is running.

    MinNumClusters int

    Minimum number of clusters available when a SQL warehouse is running.

    Name string

    Name of the SQL warehouse.

    NumClusters int
    OdbcParams GetSqlWarehouseOdbcParams

    ODBC connection params: odbc_params.hostname, odbc_params.path, odbc_params.protocol, and odbc_params.port.

    SpotInstancePolicy string

    The spot policy to use for allocating instances to clusters: COST_OPTIMIZED or RELIABILITY_OPTIMIZED.

    State string
    Tags GetSqlWarehouseTags

    tags used for SQL warehouse resources.

    autoStopMins Integer

    Time in minutes until an idle SQL warehouse terminates all clusters and stops.

    channel GetSqlWarehouseChannel

    Block consisting of the following fields:

    clusterSize String

    The size of the clusters allocated to the warehouse: "2X-Small", "X-Small", "Small", "Medium", "Large", "X-Large", "2X-Large", "3X-Large", "4X-Large".

    dataSourceId String

    ID of the data source for this warehouse. This is used to bind a Databricks SQL query to a warehouse.

    enablePhoton Boolean

    Whether Photon is enabled.

    enableServerlessCompute Boolean

    Whether this SQL warehouse is a serverless SQL warehouse.

    id String

    The ID of the SQL warehouse.

    instanceProfileArn String
    jdbcUrl String

    JDBC connection string.

    maxNumClusters Integer

    Maximum number of clusters available when a SQL warehouse is running.

    minNumClusters Integer

    Minimum number of clusters available when a SQL warehouse is running.

    name String

    Name of the SQL warehouse.

    numClusters Integer
    odbcParams GetSqlWarehouseOdbcParams

    ODBC connection params: odbc_params.hostname, odbc_params.path, odbc_params.protocol, and odbc_params.port.

    spotInstancePolicy String

    The spot policy to use for allocating instances to clusters: COST_OPTIMIZED or RELIABILITY_OPTIMIZED.

    state String
    tags GetSqlWarehouseTags

    tags used for SQL warehouse resources.

    autoStopMins number

    Time in minutes until an idle SQL warehouse terminates all clusters and stops.

    channel GetSqlWarehouseChannel

    Block consisting of the following fields:

    clusterSize string

    The size of the clusters allocated to the warehouse: "2X-Small", "X-Small", "Small", "Medium", "Large", "X-Large", "2X-Large", "3X-Large", "4X-Large".

    dataSourceId string

    ID of the data source for this warehouse. This is used to bind a Databricks SQL query to a warehouse.

    enablePhoton boolean

    Whether Photon is enabled.

    enableServerlessCompute boolean

    Whether this SQL warehouse is a serverless SQL warehouse.

    id string

    The ID of the SQL warehouse.

    instanceProfileArn string
    jdbcUrl string

    JDBC connection string.

    maxNumClusters number

    Maximum number of clusters available when a SQL warehouse is running.

    minNumClusters number

    Minimum number of clusters available when a SQL warehouse is running.

    name string

    Name of the SQL warehouse.

    numClusters number
    odbcParams GetSqlWarehouseOdbcParams

    ODBC connection params: odbc_params.hostname, odbc_params.path, odbc_params.protocol, and odbc_params.port.

    spotInstancePolicy string

    The spot policy to use for allocating instances to clusters: COST_OPTIMIZED or RELIABILITY_OPTIMIZED.

    state string
    tags GetSqlWarehouseTags

    tags used for SQL warehouse resources.

    auto_stop_mins int

    Time in minutes until an idle SQL warehouse terminates all clusters and stops.

    channel GetSqlWarehouseChannel

    Block consisting of the following fields:

    cluster_size str

    The size of the clusters allocated to the warehouse: "2X-Small", "X-Small", "Small", "Medium", "Large", "X-Large", "2X-Large", "3X-Large", "4X-Large".

    data_source_id str

    ID of the data source for this warehouse. This is used to bind a Databricks SQL query to a warehouse.

    enable_photon bool

    Whether Photon is enabled.

    enable_serverless_compute bool

    Whether this SQL warehouse is a serverless SQL warehouse.

    id str

    The ID of the SQL warehouse.

    instance_profile_arn str
    jdbc_url str

    JDBC connection string.

    max_num_clusters int

    Maximum number of clusters available when a SQL warehouse is running.

    min_num_clusters int

    Minimum number of clusters available when a SQL warehouse is running.

    name str

    Name of the SQL warehouse.

    num_clusters int
    odbc_params GetSqlWarehouseOdbcParams

    ODBC connection params: odbc_params.hostname, odbc_params.path, odbc_params.protocol, and odbc_params.port.

    spot_instance_policy str

    The spot policy to use for allocating instances to clusters: COST_OPTIMIZED or RELIABILITY_OPTIMIZED.

    state str
    tags GetSqlWarehouseTags

    tags used for SQL warehouse resources.

    autoStopMins Number

    Time in minutes until an idle SQL warehouse terminates all clusters and stops.

    channel Property Map

    Block consisting of the following fields:

    clusterSize String

    The size of the clusters allocated to the warehouse: "2X-Small", "X-Small", "Small", "Medium", "Large", "X-Large", "2X-Large", "3X-Large", "4X-Large".

    dataSourceId String

    ID of the data source for this warehouse. This is used to bind a Databricks SQL query to a warehouse.

    enablePhoton Boolean

    Whether Photon is enabled.

    enableServerlessCompute Boolean

    Whether this SQL warehouse is a serverless SQL warehouse.

    id String

    The ID of the SQL warehouse.

    instanceProfileArn String
    jdbcUrl String

    JDBC connection string.

    maxNumClusters Number

    Maximum number of clusters available when a SQL warehouse is running.

    minNumClusters Number

    Minimum number of clusters available when a SQL warehouse is running.

    name String

    Name of the SQL warehouse.

    numClusters Number
    odbcParams Property Map

    ODBC connection params: odbc_params.hostname, odbc_params.path, odbc_params.protocol, and odbc_params.port.

    spotInstancePolicy String

    The spot policy to use for allocating instances to clusters: COST_OPTIMIZED or RELIABILITY_OPTIMIZED.

    state String
    tags Property Map

    tags used for SQL warehouse resources.

    Supporting Types

    GetSqlWarehouseChannel

    Name string

    Name of the Databricks SQL release channel. Possible values are: CHANNEL_NAME_PREVIEW and CHANNEL_NAME_CURRENT. Default is CHANNEL_NAME_CURRENT.

    Name string

    Name of the Databricks SQL release channel. Possible values are: CHANNEL_NAME_PREVIEW and CHANNEL_NAME_CURRENT. Default is CHANNEL_NAME_CURRENT.

    name String

    Name of the Databricks SQL release channel. Possible values are: CHANNEL_NAME_PREVIEW and CHANNEL_NAME_CURRENT. Default is CHANNEL_NAME_CURRENT.

    name string

    Name of the Databricks SQL release channel. Possible values are: CHANNEL_NAME_PREVIEW and CHANNEL_NAME_CURRENT. Default is CHANNEL_NAME_CURRENT.

    name str

    Name of the Databricks SQL release channel. Possible values are: CHANNEL_NAME_PREVIEW and CHANNEL_NAME_CURRENT. Default is CHANNEL_NAME_CURRENT.

    name String

    Name of the Databricks SQL release channel. Possible values are: CHANNEL_NAME_PREVIEW and CHANNEL_NAME_CURRENT. Default is CHANNEL_NAME_CURRENT.

    GetSqlWarehouseOdbcParams

    Path string
    Port int
    Protocol string
    Host string
    Hostname string
    Path string
    Port int
    Protocol string
    Host string
    Hostname string
    path String
    port Integer
    protocol String
    host String
    hostname String
    path string
    port number
    protocol string
    host string
    hostname string
    path str
    port int
    protocol str
    host str
    hostname str
    path String
    port Number
    protocol String
    host String
    hostname String
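
    For example, a minimal sketch (assuming a warehouse named "Starter Warehouse" exists in the workspace, and with an illustrative export name) of assembling a connection address from these ODBC parameters:

    import * as databricks from "@pulumi/databricks";

    const warehouse = databricks.getSqlWarehouse({ name: "Starter Warehouse" });

    // Combine the returned ODBC parameters into a single server address string.
    export const odbcServer = warehouse.then(w =>
        `${w.odbcParams?.protocol}://${w.odbcParams?.hostname}:${w.odbcParams?.port}${w.odbcParams?.path}`);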

    GetSqlWarehouseTags

    GetSqlWarehouseTagsCustomTag

    Key string
    Value string
    Key string
    Value string
    key String
    value String
    key string
    value string
    key str
    value str
    key String
    value String
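
    The tags block carries a list of custom key/value tags. As a small sketch of reading it (the warehouse name and export name are illustrative only):

    import * as databricks from "@pulumi/databricks";

    const warehouse = databricks.getSqlWarehouse({ name: "Starter Warehouse" });

    // List the custom tag keys attached to the warehouse, if any.
    export const customTagKeys = warehouse.then(w =>
        (w.tags?.customTags ?? []).map(t => t.key));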

    Package Details

    Repository
    databricks pulumi/pulumi-databricks
    License
    Apache-2.0
    Notes

    This Pulumi package is based on the databricks Terraform Provider.
