Databricks v1.14.0, May 23, 2023

databricks.SqlGlobalConfig

This resource configures the security policy, databricks_instance_profile, and data access properties for all databricks.SqlEndpoint of the workspace. Please note that changing parameters of this resource will restart all running databricks_sql_endpoint. To use this resource you need to be an administrator.

The following resources are often used in the same context:

  • End-to-end workspace management guide.
  • databricks.InstanceProfile to manage AWS EC2 instance profiles that users can use to launch databricks.Cluster and access data, like databricks_mount.
  • databricks.SqlDashboard to manage Databricks SQL Dashboards.
  • databricks.SqlEndpoint to manage Databricks SQL Endpoints.
  • databricks.SqlPermissions to manage data object access control lists in Databricks workspaces for things like tables, views, databases, and more.

Example Usage

AWS example

C#:

using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Databricks = Pulumi.Databricks;

return await Deployment.RunAsync(() => 
{
    var @this = new Databricks.SqlGlobalConfig("this", new()
    {
        SecurityPolicy = "DATA_ACCESS_CONTROL",
        InstanceProfileArn = "arn:....",
        DataAccessConfig = 
        {
            { "spark.sql.session.timeZone", "UTC" },
        },
    });

});

Go:

package main

import (
	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := databricks.NewSqlGlobalConfig(ctx, "this", &databricks.SqlGlobalConfigArgs{
			SecurityPolicy:     pulumi.String("DATA_ACCESS_CONTROL"),
			InstanceProfileArn: pulumi.String("arn:...."),
			DataAccessConfig: pulumi.AnyMap{
				"spark.sql.session.timeZone": pulumi.Any("UTC"),
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}

Java:

package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.databricks.SqlGlobalConfig;
import com.pulumi.databricks.SqlGlobalConfigArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var this_ = new SqlGlobalConfig("this", SqlGlobalConfigArgs.builder()        
            .securityPolicy("DATA_ACCESS_CONTROL")
            .instanceProfileArn("arn:....")
            .dataAccessConfig(Map.of("spark.sql.session.timeZone", "UTC"))
            .build());

    }
}

Python:

import pulumi
import pulumi_databricks as databricks

this = databricks.SqlGlobalConfig("this",
    security_policy="DATA_ACCESS_CONTROL",
    instance_profile_arn="arn:....",
    data_access_config={
        "spark.sql.session.timeZone": "UTC",
    })

TypeScript:

import * as pulumi from "@pulumi/pulumi";
import * as databricks from "@pulumi/databricks";

const _this = new databricks.SqlGlobalConfig("this", {
    securityPolicy: "DATA_ACCESS_CONTROL",
    instanceProfileArn: "arn:....",
    dataAccessConfig: {
        "spark.sql.session.timeZone": "UTC",
    },
});

YAML:

resources:
  this:
    type: databricks:SqlGlobalConfig
    properties:
      securityPolicy: DATA_ACCESS_CONTROL
      instanceProfileArn: arn:....
      dataAccessConfig:
        spark.sql.session.timeZone: UTC

Azure example

C#:

using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Databricks = Pulumi.Databricks;

return await Deployment.RunAsync(() =>
{
    // Tenant/application details and secret coordinates are supplied via Pulumi config.
    var config = new Config();
    var applicationId = config.Require("applicationId");
    var tenantId = config.Require("tenantId");
    var secretScope = config.Require("secretScope");
    var secretKey = config.Require("secretKey");

    var @this = new Databricks.SqlGlobalConfig("this", new()
    {
        SecurityPolicy = "DATA_ACCESS_CONTROL",
        DataAccessConfig =
        {
            { "spark.hadoop.fs.azure.account.auth.type", "OAuth" },
            { "spark.hadoop.fs.azure.account.oauth.provider.type", "org.apache.hadoop.fs.azurebfs.oauth2.ClientCredsTokenProvider" },
            { "spark.hadoop.fs.azure.account.oauth2.client.id", applicationId },
            { "spark.hadoop.fs.azure.account.oauth2.client.secret", $"{{{{secrets/{secretScope}/{secretKey}}}}}" },
            { "spark.hadoop.fs.azure.account.oauth2.client.endpoint", $"https://login.microsoftonline.com/{tenantId}/oauth2/token" },
        },
        SqlConfigParams =
        {
            { "ANSI_MODE", "true" },
        },
    });

});

Go:

package main

import (
	"fmt"

	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi/config"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		// Tenant/application details and secret coordinates are supplied via Pulumi config.
		cfg := config.New(ctx, "")
		applicationId := cfg.Require("applicationId")
		tenantId := cfg.Require("tenantId")
		secretScope := cfg.Require("secretScope")
		secretKey := cfg.Require("secretKey")

		_, err := databricks.NewSqlGlobalConfig(ctx, "this", &databricks.SqlGlobalConfigArgs{
			SecurityPolicy: pulumi.String("DATA_ACCESS_CONTROL"),
			DataAccessConfig: pulumi.AnyMap{
				"spark.hadoop.fs.azure.account.auth.type":              pulumi.Any("OAuth"),
				"spark.hadoop.fs.azure.account.oauth.provider.type":    pulumi.Any("org.apache.hadoop.fs.azurebfs.oauth2.ClientCredsTokenProvider"),
				"spark.hadoop.fs.azure.account.oauth2.client.id":       pulumi.Any(applicationId),
				"spark.hadoop.fs.azure.account.oauth2.client.secret":   pulumi.Any(fmt.Sprintf("{{secrets/%v/%v}}", secretScope, secretKey)),
				"spark.hadoop.fs.azure.account.oauth2.client.endpoint": pulumi.Any(fmt.Sprintf("https://login.microsoftonline.com/%v/oauth2/token", tenantId)),
			},
			SqlConfigParams: pulumi.AnyMap{
				"ANSI_MODE": pulumi.Any("true"),
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}

Java:

package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.databricks.SqlGlobalConfig;
import com.pulumi.databricks.SqlGlobalConfigArgs;
import java.util.Map;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        // Tenant/application details and secret coordinates are supplied via Pulumi config.
        var config = ctx.config();
        var applicationId = config.require("applicationId");
        var tenantId = config.require("tenantId");
        var secretScope = config.require("secretScope");
        var secretKey = config.require("secretKey");

        var this_ = new SqlGlobalConfig("this", SqlGlobalConfigArgs.builder()
            .securityPolicy("DATA_ACCESS_CONTROL")
            .dataAccessConfig(Map.ofEntries(
                Map.entry("spark.hadoop.fs.azure.account.auth.type", "OAuth"),
                Map.entry("spark.hadoop.fs.azure.account.oauth.provider.type", "org.apache.hadoop.fs.azurebfs.oauth2.ClientCredsTokenProvider"),
                Map.entry("spark.hadoop.fs.azure.account.oauth2.client.id", applicationId),
                Map.entry("spark.hadoop.fs.azure.account.oauth2.client.secret", String.format("{{secrets/%s/%s}}", secretScope, secretKey)),
                Map.entry("spark.hadoop.fs.azure.account.oauth2.client.endpoint", String.format("https://login.microsoftonline.com/%s/oauth2/token", tenantId))
            ))
            .sqlConfigParams(Map.of("ANSI_MODE", "true"))
            .build());
    }
}

Python:

import pulumi
import pulumi_databricks as databricks

# Tenant/application details and secret coordinates are supplied via Pulumi config.
config = pulumi.Config()
application_id = config.require("application_id")
tenant_id = config.require("tenant_id")
secret_scope = config.require("secret_scope")
secret_key = config.require("secret_key")

this = databricks.SqlGlobalConfig("this",
    security_policy="DATA_ACCESS_CONTROL",
    data_access_config={
        "spark.hadoop.fs.azure.account.auth.type": "OAuth",
        "spark.hadoop.fs.azure.account.oauth.provider.type": "org.apache.hadoop.fs.azurebfs.oauth2.ClientCredsTokenProvider",
        "spark.hadoop.fs.azure.account.oauth2.client.id": application_id,
        "spark.hadoop.fs.azure.account.oauth2.client.secret": f"{{{{secrets/{secret_scope}/{secret_key}}}}}",
        "spark.hadoop.fs.azure.account.oauth2.client.endpoint": f"https://login.microsoftonline.com/{tenant_id}/oauth2/token",
    },
    sql_config_params={
        "ANSI_MODE": "true",
    })

TypeScript:

import * as pulumi from "@pulumi/pulumi";
import * as databricks from "@pulumi/databricks";

// Tenant/application details and secret coordinates are supplied via Pulumi config.
const config = new pulumi.Config();
const applicationId = config.require("applicationId");
const tenantId = config.require("tenantId");
const secretScope = config.require("secretScope");
const secretKey = config.require("secretKey");

const _this = new databricks.SqlGlobalConfig("this", {
    securityPolicy: "DATA_ACCESS_CONTROL",
    dataAccessConfig: {
        "spark.hadoop.fs.azure.account.auth.type": "OAuth",
        "spark.hadoop.fs.azure.account.oauth.provider.type": "org.apache.hadoop.fs.azurebfs.oauth2.ClientCredsTokenProvider",
        "spark.hadoop.fs.azure.account.oauth2.client.id": applicationId,
        "spark.hadoop.fs.azure.account.oauth2.client.secret": `{{secrets/${secretScope}/${secretKey}}}`,
        "spark.hadoop.fs.azure.account.oauth2.client.endpoint": `https://login.microsoftonline.com/${tenantId}/oauth2/token`,
    },
    sqlConfigParams: {
        ANSI_MODE: "true",
    },
});

YAML:

configuration:
  applicationId:
    type: String
  tenantId:
    type: String
  secretScope:
    type: String
  secretKey:
    type: String
resources:
  this:
    type: databricks:SqlGlobalConfig
    properties:
      securityPolicy: DATA_ACCESS_CONTROL
      dataAccessConfig:
        spark.hadoop.fs.azure.account.auth.type: OAuth
        spark.hadoop.fs.azure.account.oauth.provider.type: org.apache.hadoop.fs.azurebfs.oauth2.ClientCredsTokenProvider
        spark.hadoop.fs.azure.account.oauth2.client.id: ${applicationId}
        spark.hadoop.fs.azure.account.oauth2.client.secret: '{{secrets/${secretScope}/${secretKey}}}'
        spark.hadoop.fs.azure.account.oauth2.client.endpoint: https://login.microsoftonline.com/${tenantId}/oauth2/token
      sqlConfigParams:
        ANSI_MODE: 'true'

Create SqlGlobalConfig Resource

new SqlGlobalConfig(name: string, args?: SqlGlobalConfigArgs, opts?: CustomResourceOptions);
@overload
def SqlGlobalConfig(resource_name: str,
                    opts: Optional[ResourceOptions] = None,
                    data_access_config: Optional[Mapping[str, Any]] = None,
                    enable_serverless_compute: Optional[bool] = None,
                    instance_profile_arn: Optional[str] = None,
                    security_policy: Optional[str] = None,
                    sql_config_params: Optional[Mapping[str, Any]] = None)
@overload
def SqlGlobalConfig(resource_name: str,
                    args: Optional[SqlGlobalConfigArgs] = None,
                    opts: Optional[ResourceOptions] = None)
func NewSqlGlobalConfig(ctx *Context, name string, args *SqlGlobalConfigArgs, opts ...ResourceOption) (*SqlGlobalConfig, error)
public SqlGlobalConfig(string name, SqlGlobalConfigArgs? args = null, CustomResourceOptions? opts = null)
public SqlGlobalConfig(String name, SqlGlobalConfigArgs args)
public SqlGlobalConfig(String name, SqlGlobalConfigArgs args, CustomResourceOptions options)
type: databricks:SqlGlobalConfig
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.

name string
The unique name of the resource.
args SqlGlobalConfigArgs
The arguments to resource properties.
opts CustomResourceOptions
Bag of options to control resource's behavior.
resource_name str
The unique name of the resource.
args SqlGlobalConfigArgs
The arguments to resource properties.
opts ResourceOptions
Bag of options to control resource's behavior.
ctx Context
Context object for the current deployment.
name string
The unique name of the resource.
args SqlGlobalConfigArgs
The arguments to resource properties.
opts ResourceOption
Bag of options to control resource's behavior.
name string
The unique name of the resource.
args SqlGlobalConfigArgs
The arguments to resource properties.
opts CustomResourceOptions
Bag of options to control resource's behavior.
name String
The unique name of the resource.
args SqlGlobalConfigArgs
The arguments to resource properties.
options CustomResourceOptions
Bag of options to control resource's behavior.

SqlGlobalConfig Resource Properties

To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

Inputs

The SqlGlobalConfig resource accepts the following input properties:

DataAccessConfig Dictionary<string, object>

Data access configuration for databricks_sql_endpoint, such as configuration for an external Hive metastore, Hadoop Filesystem configuration, etc. Note that the list of supported configuration properties is limited, so refer to the documentation for the full list. Apply will fail if you specify a configuration property that is not permitted.

EnableServerlessCompute bool

Deprecated:

This field is intended as an internal API and may be removed from the Databricks Terraform provider in the future

InstanceProfileArn string

databricks_instance_profile used to access storage from databricks_sql_endpoint. Please note that this parameter is only for AWS, and will generate an error if used on other clouds.

SecurityPolicy string

The policy for controlling access to datasets. Default value: DATA_ACCESS_CONTROL; consult the documentation for the list of possible values.

SqlConfigParams Dictionary<string, object>

SQL Configuration Parameters let you override the default behavior for all sessions with all endpoints.

DataAccessConfig map[string]interface{}

Data access configuration for databricks_sql_endpoint, such as configuration for an external Hive metastore, Hadoop Filesystem configuration, etc. Note that the list of supported configuration properties is limited, so refer to the documentation for the full list. Apply will fail if you specify a configuration property that is not permitted.

EnableServerlessCompute bool

Deprecated:

This field is intended as an internal API and may be removed from the Databricks Terraform provider in the future

InstanceProfileArn string

databricks_instance_profile used to access storage from databricks_sql_endpoint. Please note that this parameter is only for AWS, and will generate an error if used on other clouds.

SecurityPolicy string

The policy for controlling access to datasets. Default value: DATA_ACCESS_CONTROL; consult the documentation for the list of possible values.

SqlConfigParams map[string]interface{}

SQL Configuration Parameters let you override the default behavior for all sessions with all endpoints.

dataAccessConfig Map<String,Object>

Data access configuration for databricks_sql_endpoint, such as configuration for an external Hive metastore, Hadoop Filesystem configuration, etc. Note that the list of supported configuration properties is limited, so refer to the documentation for the full list. Apply will fail if you specify a configuration property that is not permitted.

enableServerlessCompute Boolean

Deprecated:

This field is intended as an internal API and may be removed from the Databricks Terraform provider in the future

instanceProfileArn String

databricks_instance_profile used to access storage from databricks_sql_endpoint. Please note that this parameter is only for AWS, and will generate an error if used on other clouds.

securityPolicy String

The policy for controlling access to datasets. Default value: DATA_ACCESS_CONTROL; consult the documentation for the list of possible values.

sqlConfigParams Map<String,Object>

SQL Configuration Parameters let you override the default behavior for all sessions with all endpoints.

dataAccessConfig {[key: string]: any}

Data access configuration for databricks_sql_endpoint, such as configuration for an external Hive metastore, Hadoop Filesystem configuration, etc. Note that the list of supported configuration properties is limited, so refer to the documentation for the full list. Apply will fail if you specify a configuration property that is not permitted.

enableServerlessCompute boolean

Deprecated:

This field is intended as an internal API and may be removed from the Databricks Terraform provider in the future

instanceProfileArn string

databricks_instance_profile used to access storage from databricks_sql_endpoint. Please note that this parameter is only for AWS, and will generate an error if used on other clouds.

securityPolicy string

The policy for controlling access to datasets. Default value: DATA_ACCESS_CONTROL; consult the documentation for the list of possible values.

sqlConfigParams {[key: string]: any}

SQL Configuration Parameters let you override the default behavior for all sessions with all endpoints.

data_access_config Mapping[str, Any]

Data access configuration for databricks_sql_endpoint, such as configuration for an external Hive metastore, Hadoop Filesystem configuration, etc. Note that the list of supported configuration properties is limited, so refer to the documentation for the full list. Apply will fail if you specify a configuration property that is not permitted.

enable_serverless_compute bool

Deprecated:

This field is intended as an internal API and may be removed from the Databricks Terraform provider in the future

instance_profile_arn str

databricks_instance_profile used to access storage from databricks_sql_endpoint. Please note that this parameter is only for AWS, and will generate an error if used on other clouds.

security_policy str

The policy for controlling access to datasets. Default value: DATA_ACCESS_CONTROL; consult the documentation for the list of possible values.

sql_config_params Mapping[str, Any]

SQL Configuration Parameters let you override the default behavior for all sessions with all endpoints.

dataAccessConfig Map<Any>

Data access configuration for databricks_sql_endpoint, such as configuration for an external Hive metastore, Hadoop Filesystem configuration, etc. Note that the list of supported configuration properties is limited, so refer to the documentation for the full list. Apply will fail if you specify a configuration property that is not permitted.

enableServerlessCompute Boolean

Deprecated:

This field is intended as an internal API and may be removed from the Databricks Terraform provider in the future

instanceProfileArn String

databricks_instance_profile used to access storage from databricks_sql_endpoint. Please note that this parameter is only for AWS, and will generate an error if used on other clouds.

securityPolicy String

The policy for controlling access to datasets. Default value: DATA_ACCESS_CONTROL; consult the documentation for the list of possible values.

sqlConfigParams Map<Any>

SQL Configuration Parameters let you override the default behavior for all sessions with all endpoints.
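
As a concrete illustration of dataAccessConfig, here is a hedged TypeScript sketch that points all SQL endpoints at an external Hive metastore. The spark.hadoop.javax.jdo.* keys are the usual Hive metastore JDBC connection properties, and the config names (metastoreJdbcUrl, etc.) are hypothetical; verify the keys against the Databricks SQL data access documentation, since unsupported properties will fail on apply.

import * as pulumi from "@pulumi/pulumi";
import * as databricks from "@pulumi/databricks";

const config = new pulumi.Config();

// Point every SQL endpoint at an external Hive metastore (illustrative keys only).
const metastore = new databricks.SqlGlobalConfig("metastore", {
    securityPolicy: "DATA_ACCESS_CONTROL",
    dataAccessConfig: {
        "spark.hadoop.javax.jdo.option.ConnectionURL": config.require("metastoreJdbcUrl"),
        "spark.hadoop.javax.jdo.option.ConnectionDriverName": "org.mariadb.jdbc.Driver",
        "spark.hadoop.javax.jdo.option.ConnectionUserName": config.require("metastoreUser"),
        "spark.hadoop.javax.jdo.option.ConnectionPassword": config.requireSecret("metastorePassword"),
    },
});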

Outputs

All input properties are implicitly available as output properties. Additionally, the SqlGlobalConfig resource produces the following output properties:

Id string

The provider-assigned unique ID for this managed resource.

Id string

The provider-assigned unique ID for this managed resource.

id String

The provider-assigned unique ID for this managed resource.

id string

The provider-assigned unique ID for this managed resource.

id str

The provider-assigned unique ID for this managed resource.

id String

The provider-assigned unique ID for this managed resource.

Look up Existing SqlGlobalConfig Resource

Get an existing SqlGlobalConfig resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

public static get(name: string, id: Input<ID>, state?: SqlGlobalConfigState, opts?: CustomResourceOptions): SqlGlobalConfig
@staticmethod
def get(resource_name: str,
        id: str,
        opts: Optional[ResourceOptions] = None,
        data_access_config: Optional[Mapping[str, Any]] = None,
        enable_serverless_compute: Optional[bool] = None,
        instance_profile_arn: Optional[str] = None,
        security_policy: Optional[str] = None,
        sql_config_params: Optional[Mapping[str, Any]] = None) -> SqlGlobalConfig
func GetSqlGlobalConfig(ctx *Context, name string, id IDInput, state *SqlGlobalConfigState, opts ...ResourceOption) (*SqlGlobalConfig, error)
public static SqlGlobalConfig Get(string name, Input<string> id, SqlGlobalConfigState? state, CustomResourceOptions? opts = null)
public static SqlGlobalConfig get(String name, Output<String> id, SqlGlobalConfigState state, CustomResourceOptions options)
Resource lookup is not supported in YAML
name
The unique name of the resulting resource.
id
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
resource_name
The unique name of the resulting resource.
id
The unique provider ID of the resource to lookup.
name
The unique name of the resulting resource.
id
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
name
The unique name of the resulting resource.
id
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
name
The unique name of the resulting resource.
id
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
The following state arguments are supported:
DataAccessConfig Dictionary<string, object>

Data access configuration for databricks_sql_endpoint, such as configuration for an external Hive metastore, Hadoop Filesystem configuration, etc. Note that the list of supported configuration properties is limited, so refer to the documentation for the full list. Apply will fail if you specify a configuration property that is not permitted.

EnableServerlessCompute bool

Deprecated:

This field is intended as an internal API and may be removed from the Databricks Terraform provider in the future

InstanceProfileArn string

databricks_instance_profile used to access storage from databricks_sql_endpoint. Please note that this parameter is only for AWS, and will generate an error if used on other clouds.

SecurityPolicy string

The policy for controlling access to datasets. Default value: DATA_ACCESS_CONTROL; consult the documentation for the list of possible values.

SqlConfigParams Dictionary<string, object>

SQL Configuration Parameters let you override the default behavior for all sessions with all endpoints.

DataAccessConfig map[string]interface{}

Data access configuration for databricks_sql_endpoint, such as configuration for an external Hive metastore, Hadoop Filesystem configuration, etc. Note that the list of supported configuration properties is limited, so refer to the documentation for the full list. Apply will fail if you specify a configuration property that is not permitted.

EnableServerlessCompute bool

Deprecated:

This field is intended as an internal API and may be removed from the Databricks Terraform provider in the future

InstanceProfileArn string

databricks_instance_profile used to access storage from databricks_sql_endpoint. Please note that this parameter is only for AWS, and will generate an error if used on other clouds.

SecurityPolicy string

The policy for controlling access to datasets. Default value: DATA_ACCESS_CONTROL; consult the documentation for the list of possible values.

SqlConfigParams map[string]interface{}

SQL Configuration Parameters let you override the default behavior for all sessions with all endpoints.

dataAccessConfig Map<String,Object>

Data access configuration for databricks_sql_endpoint, such as configuration for an external Hive metastore, Hadoop Filesystem configuration, etc. Note that the list of supported configuration properties is limited, so refer to the documentation for the full list. Apply will fail if you specify a configuration property that is not permitted.

enableServerlessCompute Boolean

Deprecated:

This field is intended as an internal API and may be removed from the Databricks Terraform provider in the future

instanceProfileArn String

databricks_instance_profile used to access storage from databricks_sql_endpoint. Please note that this parameter is only for AWS, and will generate an error if used on other clouds.

securityPolicy String

The policy for controlling access to datasets. Default value: DATA_ACCESS_CONTROL; consult the documentation for the list of possible values.

sqlConfigParams Map<String,Object>

SQL Configuration Parameters let you override the default behavior for all sessions with all endpoints.

dataAccessConfig {[key: string]: any}

Data access configuration for databricks_sql_endpoint, such as configuration for an external Hive metastore, Hadoop Filesystem configuration, etc. Note that the list of supported configuration properties is limited, so refer to the documentation for the full list. Apply will fail if you specify a configuration property that is not permitted.

enableServerlessCompute boolean

Deprecated:

This field is intended as an internal API and may be removed from the Databricks Terraform provider in the future

instanceProfileArn string

databricks_instance_profile used to access storage from databricks_sql_endpoint. Please note that this parameter is only for AWS, and will generate an error if used on other clouds.

securityPolicy string

The policy for controlling access to datasets. Default value: DATA_ACCESS_CONTROL; consult the documentation for the list of possible values.

sqlConfigParams {[key: string]: any}

SQL Configuration Parameters let you override the default behavior for all sessions with all endpoints.

data_access_config Mapping[str, Any]

Data access configuration for databricks_sql_endpoint, such as configuration for an external Hive metastore, Hadoop Filesystem configuration, etc. Note that the list of supported configuration properties is limited, so refer to the documentation for the full list. Apply will fail if you specify a configuration property that is not permitted.

enable_serverless_compute bool

Deprecated:

This field is intended as an internal API and may be removed from the Databricks Terraform provider in the future

instance_profile_arn str

databricks_instance_profile used to access storage from databricks_sql_endpoint. Please note that this parameter is only for AWS, and will generate an error if used on other clouds.

security_policy str

The policy for controlling access to datasets. Default value: DATA_ACCESS_CONTROL; consult the documentation for the list of possible values.

sql_config_params Mapping[str, Any]

SQL Configuration Parameters let you override the default behavior for all sessions with all endpoints.

dataAccessConfig Map<Any>

Data access configuration for databricks_sql_endpoint, such as configuration for an external Hive metastore, Hadoop Filesystem configuration, etc. Note that the list of supported configuration properties is limited, so refer to the documentation for the full list. Apply will fail if you specify a configuration property that is not permitted.

enableServerlessCompute Boolean

Deprecated:

This field is intended as an internal API and may be removed from the Databricks Terraform provider in the future

instanceProfileArn String

databricks_instance_profile used to access storage from databricks_sql_endpoint. Please note that this parameter is only for AWS, and will generate an error if used on other clouds.

securityPolicy String

The policy for controlling access to datasets. Default value: DATA_ACCESS_CONTROL; consult the documentation for the list of possible values.

sqlConfigParams Map<Any>

SQL Configuration Parameters let you override the default behavior for all sessions with all endpoints.
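
For example, a minimal TypeScript sketch of such a lookup; the ID is always global, as noted in the Import section below:

import * as pulumi from "@pulumi/pulumi";
import * as databricks from "@pulumi/databricks";

// Look up the existing workspace-wide SQL configuration by its fixed ID.
const existing = databricks.SqlGlobalConfig.get("existing", "global");

// All state fields are then available as outputs, e.g. the current security policy.
export const currentSecurityPolicy = existing.securityPolicy;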

Import

You can import a databricks_sql_global_config resource with a command like the following (you need to use global as the ID):

 $ pulumi import databricks:index/sqlGlobalConfig:SqlGlobalConfig this global

Package Details

Repository: databricks pulumi/pulumi-databricks
License: Apache-2.0
Notes

This Pulumi package is based on the databricks Terraform Provider.