databricks logo
Databricks v1.14.0, May 23 23

databricks.SqlQuery

Explore with Pulumi AI

To manage SQLA resources you must have databricks_sql_access on your databricks.Group or databricks.User.

Note: documentation for this resource is a work in progress.

A query may have one or more visualizations.

Troubleshooting

In case you see Error: cannot create sql query: Internal Server Error during pulumi up, double-check that you are using the correct data_source_id.

Operations on databricks.SqlQuery schedules are ⛔️ deprecated. You can create, update or delete a schedule for SQLA and other Databricks resources using the databricks.Job resource.

The following resources are often used in the same context:

  • End to end workspace management guide.
  • databricks.SqlDashboard to manage Databricks SQL Dashboards.
  • databricks.SqlEndpoint to manage Databricks SQL Endpoints.
  • databricks.SqlGlobalConfig to configure the security policy, databricks_instance_profile, and data access properties for all databricks.SqlEndpoint of workspace.
  • databricks.SqlPermissions to manage data object access control lists in Databricks workspaces for things like tables, views, databases, and more.
  • databricks.Job to schedule Databricks SQL queries (as well as dashboards and alerts) using Databricks Jobs.

Example Usage

using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Databricks = Pulumi.Databricks;

// Example: create a shared workspace folder and a parameterized Databricks SQL query inside it.
return await Deployment.RunAsync(() => 
{
    // Workspace directory that will hold the query.
    var sharedDir = new Databricks.Directory("sharedDir", new()
    {
        Path = "/Shared/Queries",
    });

    var q1 = new Databricks.SqlQuery("q1", new()
    {
        // SQL endpoint (warehouse) the query executes against.
        DataSourceId = databricks_sql_endpoint.Example.Data_source_id,
        // Query text; {{ p1 }}, {{ p2 }}, {{ p3 }} are substituted from Parameters below.
        Query = @"                        SELECT {{ p1 }} AS p1
                        WHERE 1=1
                        AND p2 in ({{ p2 }})
                        AND event_date > date '{{ p3 }}'
",
        // Place the query under the directory created above (folders/<object id>).
        Parent = sharedDir.ObjectId.Apply(objectId => $"folders/{objectId}"),
        RunAsRole = "viewer",
        Parameters = new[]
        {
            // p1: free-text parameter with a default value.
            new Databricks.Inputs.SqlQueryParameterArgs
            {
                Name = "p1",
                Title = "Title for p1",
                Text = new Databricks.Inputs.SqlQueryParameterTextArgs
                {
                    Value = "default",
                },
            },
            // p2: dropdown parameter; Multiple makes it multi-select, with each chosen
            // value wrapped in double quotes and joined by commas when substituted.
            new Databricks.Inputs.SqlQueryParameterArgs
            {
                Name = "p2",
                Title = "Title for p2",
                Enum = new Databricks.Inputs.SqlQueryParameterEnumArgs
                {
                    Options = new[]
                    {
                        "default",
                        "foo",
                        "bar",
                    },
                    Value = "default",
                    Multiple = new Databricks.Inputs.SqlQueryParameterEnumMultipleArgs
                    {
                        Prefix = "\"",
                        Suffix = "\"",
                        Separator = ",",
                    },
                },
            },
            // p3: date parameter with a fixed default.
            new Databricks.Inputs.SqlQueryParameterArgs
            {
                Name = "p3",
                Title = "Title for p3",
                Date = new Databricks.Inputs.SqlQueryParameterDateArgs
                {
                    Value = "2022-01-01",
                },
            },
        },
        Tags = new[]
        {
            "t1",
            "t2",
        },
    });

});
package main

import (
	"fmt"

	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

// Example: create a shared workspace folder and a parameterized Databricks SQL query inside it.
func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		// Workspace directory that will hold the query.
		sharedDir, err := databricks.NewDirectory(ctx, "sharedDir", &databricks.DirectoryArgs{
			Path: pulumi.String("/Shared/Queries"),
		})
		if err != nil {
			return err
		}
		_, err = databricks.NewSqlQuery(ctx, "q1", &databricks.SqlQueryArgs{
			// SQL endpoint (warehouse) the query executes against.
			DataSourceId: pulumi.Any(databricks_sql_endpoint.Example.Data_source_id),
			// Query text; {{ p1 }}, {{ p2 }}, {{ p3 }} are substituted from Parameters below.
			Query:        pulumi.String("                        SELECT {{ p1 }} AS p1\n                        WHERE 1=1\n                        AND p2 in ({{ p2 }})\n                        AND event_date > date '{{ p3 }}'\n"),
			// Place the query under the directory created above (folders/<object id>).
			Parent: sharedDir.ObjectId.ApplyT(func(objectId int) (string, error) {
				return fmt.Sprintf("folders/%v", objectId), nil
			}).(pulumi.StringOutput),
			RunAsRole: pulumi.String("viewer"),
			Parameters: databricks.SqlQueryParameterArray{
				// p1: free-text parameter with a default value.
				&databricks.SqlQueryParameterArgs{
					Name:  pulumi.String("p1"),
					Title: pulumi.String("Title for p1"),
					Text: &databricks.SqlQueryParameterTextArgs{
						Value: pulumi.String("default"),
					},
				},
				// p2: dropdown parameter; Multiple makes it multi-select, with each chosen
				// value wrapped in double quotes and joined by commas when substituted.
				&databricks.SqlQueryParameterArgs{
					Name:  pulumi.String("p2"),
					Title: pulumi.String("Title for p2"),
					Enum: &databricks.SqlQueryParameterEnumArgs{
						Options: pulumi.StringArray{
							pulumi.String("default"),
							pulumi.String("foo"),
							pulumi.String("bar"),
						},
						Value: pulumi.String("default"),
						Multiple: &databricks.SqlQueryParameterEnumMultipleArgs{
							Prefix:    pulumi.String("\""),
							Suffix:    pulumi.String("\""),
							Separator: pulumi.String(","),
						},
					},
				},
				// p3: date parameter with a fixed default.
				&databricks.SqlQueryParameterArgs{
					Name:  pulumi.String("p3"),
					Title: pulumi.String("Title for p3"),
					Date: &databricks.SqlQueryParameterDateArgs{
						Value: pulumi.String("2022-01-01"),
					},
				},
			},
			Tags: pulumi.StringArray{
				pulumi.String("t1"),
				pulumi.String("t2"),
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.databricks.Directory;
import com.pulumi.databricks.DirectoryArgs;
import com.pulumi.databricks.SqlQuery;
import com.pulumi.databricks.SqlQueryArgs;
import com.pulumi.databricks.inputs.SqlQueryParameterArgs;
import com.pulumi.databricks.inputs.SqlQueryParameterTextArgs;
import com.pulumi.databricks.inputs.SqlQueryParameterEnumArgs;
import com.pulumi.databricks.inputs.SqlQueryParameterEnumMultipleArgs;
import com.pulumi.databricks.inputs.SqlQueryParameterDateArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

/**
 * Example: create a shared workspace folder and a parameterized Databricks SQL query inside it.
 */
public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        // Workspace directory that will hold the query.
        var sharedDir = new Directory("sharedDir", DirectoryArgs.builder()        
            .path("/Shared/Queries")
            .build());

        var q1 = new SqlQuery("q1", SqlQueryArgs.builder()        
            // SQL endpoint (warehouse) the query executes against.
            .dataSourceId(databricks_sql_endpoint.example().data_source_id())
            // Query text; {{ p1 }}, {{ p2 }}, {{ p3 }} are substituted from the parameters below.
            .query("""
                        SELECT {{ p1 }} AS p1
                        WHERE 1=1
                        AND p2 in ({{ p2 }})
                        AND event_date > date '{{ p3 }}'
            """)
            // Place the query under the directory created above (folders/<object id>).
            .parent(sharedDir.objectId().applyValue(objectId -> String.format("folders/%s", objectId)))
            .runAsRole("viewer")
            .parameters(            
                // p1: free-text parameter with a default value.
                SqlQueryParameterArgs.builder()
                    .name("p1")
                    .title("Title for p1")
                    .text(SqlQueryParameterTextArgs.builder()
                        .value("default")
                        .build())
                    .build(),
                // p2: dropdown parameter; multiple() makes it multi-select, with each chosen
                // value wrapped in double quotes and joined by commas when substituted.
                SqlQueryParameterArgs.builder()
                    .name("p2")
                    .title("Title for p2")
                    .enum_(SqlQueryParameterEnumArgs.builder()
                        .options(                        
                            "default",
                            "foo",
                            "bar")
                        .value("default")
                        .multiple(SqlQueryParameterEnumMultipleArgs.builder()
                            .prefix("\"")
                            .suffix("\"")
                            .separator(",")
                            .build())
                        .build())
                    .build(),
                // p3: date parameter with a fixed default.
                SqlQueryParameterArgs.builder()
                    .name("p3")
                    .title("Title for p3")
                    .date(SqlQueryParameterDateArgs.builder()
                        .value("2022-01-01")
                        .build())
                    .build())
            .tags(            
                "t1",
                "t2")
            .build());

    }
}
import pulumi
import pulumi_databricks as databricks

# Example: create a shared workspace folder and a parameterized Databricks SQL query inside it.
shared_dir = databricks.Directory("sharedDir", path="/Shared/Queries")
q1 = databricks.SqlQuery("q1",
    # SQL endpoint (warehouse) the query executes against.
    data_source_id=databricks_sql_endpoint["example"]["data_source_id"],
    # Query text; {{ p1 }}, {{ p2 }}, {{ p3 }} are substituted from the parameters below.
    query="""                        SELECT {{ p1 }} AS p1
                        WHERE 1=1
                        AND p2 in ({{ p2 }})
                        AND event_date > date '{{ p3 }}'
""",
    # Place the query under the directory created above (folders/<object id>).
    parent=shared_dir.object_id.apply(lambda object_id: f"folders/{object_id}"),
    run_as_role="viewer",
    parameters=[
        # p1: free-text parameter with a default value.
        databricks.SqlQueryParameterArgs(
            name="p1",
            title="Title for p1",
            text=databricks.SqlQueryParameterTextArgs(
                value="default",
            ),
        ),
        # p2: dropdown parameter; `multiple` makes it multi-select, with each chosen
        # value wrapped in double quotes and joined by commas when substituted.
        databricks.SqlQueryParameterArgs(
            name="p2",
            title="Title for p2",
            enum=databricks.SqlQueryParameterEnumArgs(
                options=[
                    "default",
                    "foo",
                    "bar",
                ],
                value="default",
                multiple=databricks.SqlQueryParameterEnumMultipleArgs(
                    prefix="\"",
                    suffix="\"",
                    separator=",",
                ),
            ),
        ),
        # p3: date parameter with a fixed default.
        databricks.SqlQueryParameterArgs(
            name="p3",
            title="Title for p3",
            date=databricks.SqlQueryParameterDateArgs(
                value="2022-01-01",
            ),
        ),
    ],
    tags=[
        "t1",
        "t2",
    ])
import * as pulumi from "@pulumi/pulumi";
import * as databricks from "@pulumi/databricks";

// Example: create a shared workspace folder and a parameterized Databricks SQL query inside it.
const sharedDir = new databricks.Directory("sharedDir", {path: "/Shared/Queries"});
const q1 = new databricks.SqlQuery("q1", {
    // SQL endpoint (warehouse) the query executes against.
    dataSourceId: databricks_sql_endpoint.example.data_source_id,
    // Query text; {{ p1 }}, {{ p2 }}, {{ p3 }} are substituted from the parameters below.
    query: `                        SELECT {{ p1 }} AS p1
                        WHERE 1=1
                        AND p2 in ({{ p2 }})
                        AND event_date > date '{{ p3 }}'
`,
    // Place the query under the directory created above (folders/<object id>).
    parent: pulumi.interpolate`folders/${sharedDir.objectId}`,
    runAsRole: "viewer",
    parameters: [
        // p1: free-text parameter with a default value.
        {
            name: "p1",
            title: "Title for p1",
            text: {
                value: "default",
            },
        },
        // p2: dropdown parameter; `multiple` makes it multi-select, with each chosen
        // value wrapped in double quotes and joined by commas when substituted.
        // ("enum" is quoted because it is a reserved word in JavaScript/TypeScript.)
        {
            name: "p2",
            title: "Title for p2",
            "enum": {
                options: [
                    "default",
                    "foo",
                    "bar",
                ],
                value: "default",
                multiple: {
                    prefix: "\"",
                    suffix: "\"",
                    separator: ",",
                },
            },
        },
        // p3: date parameter with a fixed default.
        {
            name: "p3",
            title: "Title for p3",
            date: {
                value: "2022-01-01",
            },
        },
    ],
    tags: [
        "t1",
        "t2",
    ],
});
resources:
  # Workspace directory that will hold the query.
  sharedDir:
    type: databricks:Directory
    properties:
      path: /Shared/Queries
  # Parameterized SQL query stored under the shared folder.
  q1:
    type: databricks:SqlQuery
    properties:
      # SQL endpoint (warehouse) the query executes against.
      dataSourceId: ${databricks_sql_endpoint.example.data_source_id}
      # Query text; {{ p1 }}, {{ p2 }}, {{ p3 }} are substituted from the parameters below.
      query: |2
                                SELECT {{ p1 }} AS p1
                                WHERE 1=1
                                AND p2 in ({{ p2 }})
                                AND event_date > date '{{ p3 }}'
      # Place the query under the directory created above (folders/<object id>).
      parent: folders/${sharedDir.objectId}
      runAsRole: viewer
      parameters:
        # p1: free-text parameter with a default value.
        - name: p1
          title: Title for p1
          text:
            value: default
        # p2: dropdown parameter; `multiple` makes it multi-select, with each chosen
        # value wrapped in double quotes and joined by commas when substituted.
        - name: p2
          title: Title for p2
          enum:
            options:
              - default
              - foo
              - bar
            value: default
            multiple:
              prefix: '"'
              suffix: '"'
              separator: ','
        # p3: date parameter with a fixed default.
        - name: p3
          title: Title for p3
          date:
            value: 2022-01-01
      tags:
        - t1
        - t2

Example permission to share query with all users

using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Databricks = Pulumi.Databricks;

// Example: share a SQL query — all users may run it, the team group may edit it.
return await Deployment.RunAsync(() => 
{
    var q1 = new Databricks.Permissions("q1", new()
    {
        // The SQL query whose access control list is being managed.
        SqlQueryId = databricks_sql_query.Q1.Id,
        AccessControls = new[]
        {
            // "users" group: run-only access.
            new Databricks.Inputs.PermissionsAccessControlArgs
            {
                GroupName = data.Databricks_group.Users.Display_name,
                PermissionLevel = "CAN_RUN",
            },
            // "team" group: may also edit the query.
            new Databricks.Inputs.PermissionsAccessControlArgs
            {
                GroupName = data.Databricks_group.Team.Display_name,
                PermissionLevel = "CAN_EDIT",
            },
        },
    });

});
package main

import (
	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

// Example: share a SQL query — all users may run it, the team group may edit it.
func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := databricks.NewPermissions(ctx, "q1", &databricks.PermissionsArgs{
			// The SQL query whose access control list is being managed.
			SqlQueryId: pulumi.Any(databricks_sql_query.Q1.Id),
			AccessControls: databricks.PermissionsAccessControlArray{
				// "users" group: run-only access.
				&databricks.PermissionsAccessControlArgs{
					GroupName:       pulumi.Any(data.Databricks_group.Users.Display_name),
					PermissionLevel: pulumi.String("CAN_RUN"),
				},
				// "team" group: may also edit the query.
				&databricks.PermissionsAccessControlArgs{
					GroupName:       pulumi.Any(data.Databricks_group.Team.Display_name),
					PermissionLevel: pulumi.String("CAN_EDIT"),
				},
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.databricks.Permissions;
import com.pulumi.databricks.PermissionsArgs;
import com.pulumi.databricks.inputs.PermissionsAccessControlArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

/**
 * Example: share a SQL query — all users may run it, the team group may edit it.
 */
public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var q1 = new Permissions("q1", PermissionsArgs.builder()        
            // The SQL query whose access control list is being managed.
            .sqlQueryId(databricks_sql_query.q1().id())
            .accessControls(            
                // "users" group: run-only access.
                PermissionsAccessControlArgs.builder()
                    .groupName(data.databricks_group().users().display_name())
                    .permissionLevel("CAN_RUN")
                    .build(),
                // "team" group: may also edit the query.
                PermissionsAccessControlArgs.builder()
                    .groupName(data.databricks_group().team().display_name())
                    .permissionLevel("CAN_EDIT")
                    .build())
            .build());

    }
}
import pulumi
import pulumi_databricks as databricks

# Example: share a SQL query — all users may run it, the team group may edit it.
#
# Fix: the doc generator previously emitted a Go format-method panic
# ("%!v(PANIC=...)") in place of the group_name values. The correct accessors
# are reconstructed from the parallel C#/Go/Java/TypeScript/YAML examples,
# which all read the display_name of the "users" and "team" group data sources.
q1 = databricks.Permissions("q1",
    # The SQL query whose access control list is being managed.
    sql_query_id=databricks_sql_query["q1"]["id"],
    access_controls=[
        # "users" group: run-only access.
        databricks.PermissionsAccessControlArgs(
            group_name=data["databricks_group"]["users"]["display_name"],
            permission_level="CAN_RUN",
        ),
        # "team" group: may also edit the query.
        databricks.PermissionsAccessControlArgs(
            group_name=data["databricks_group"]["team"]["display_name"],
            permission_level="CAN_EDIT",
        ),
    ])
import * as pulumi from "@pulumi/pulumi";
import * as databricks from "@pulumi/databricks";

// Example: share a SQL query — all users may run it, the team group may edit it.
const q1 = new databricks.Permissions("q1", {
    // The SQL query whose access control list is being managed.
    sqlQueryId: databricks_sql_query.q1.id,
    accessControls: [
        // "users" group: run-only access.
        {
            groupName: data.databricks_group.users.display_name,
            permissionLevel: "CAN_RUN",
        },
        // "team" group: may also edit the query.
        {
            groupName: data.databricks_group.team.display_name,
            permissionLevel: "CAN_EDIT",
        },
    ],
});
resources:
  # Share a SQL query: all users may run it, the team group may edit it.
  q1:
    type: databricks:Permissions
    properties:
      # The SQL query whose access control list is being managed.
      sqlQueryId: ${databricks_sql_query.q1.id}
      accessControls:
        # "users" group: run-only access.
        - groupName: ${data.databricks_group.users.display_name}
          permissionLevel: CAN_RUN
        # "team" group: may also edit the query.
        - groupName: ${data.databricks_group.team.display_name}
          permissionLevel: CAN_EDIT

Create SqlQuery Resource

new SqlQuery(name: string, args: SqlQueryArgs, opts?: CustomResourceOptions);
@overload
def SqlQuery(resource_name: str,
             opts: Optional[ResourceOptions] = None,
             data_source_id: Optional[str] = None,
             description: Optional[str] = None,
             name: Optional[str] = None,
             parameters: Optional[Sequence[SqlQueryParameterArgs]] = None,
             parent: Optional[str] = None,
             query: Optional[str] = None,
             run_as_role: Optional[str] = None,
             schedule: Optional[SqlQueryScheduleArgs] = None,
             tags: Optional[Sequence[str]] = None)
@overload
def SqlQuery(resource_name: str,
             args: SqlQueryArgs,
             opts: Optional[ResourceOptions] = None)
func NewSqlQuery(ctx *Context, name string, args SqlQueryArgs, opts ...ResourceOption) (*SqlQuery, error)
public SqlQuery(string name, SqlQueryArgs args, CustomResourceOptions? opts = null)
public SqlQuery(String name, SqlQueryArgs args)
public SqlQuery(String name, SqlQueryArgs args, CustomResourceOptions options)
type: databricks:SqlQuery
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.

name string
The unique name of the resource.
args SqlQueryArgs
The arguments to resource properties.
opts CustomResourceOptions
Bag of options to control resource's behavior.
resource_name str
The unique name of the resource.
args SqlQueryArgs
The arguments to resource properties.
opts ResourceOptions
Bag of options to control resource's behavior.
ctx Context
Context object for the current deployment.
name string
The unique name of the resource.
args SqlQueryArgs
The arguments to resource properties.
opts ResourceOption
Bag of options to control resource's behavior.
name string
The unique name of the resource.
args SqlQueryArgs
The arguments to resource properties.
opts CustomResourceOptions
Bag of options to control resource's behavior.
name String
The unique name of the resource.
args SqlQueryArgs
The arguments to resource properties.
options CustomResourceOptions
Bag of options to control resource's behavior.

SqlQuery Resource Properties

To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

Inputs

The SqlQuery resource accepts the following input properties:

DataSourceId string
Query string
Description string
Name string
Parameters List<SqlQueryParameterArgs>
Parent string
RunAsRole string
Schedule SqlQueryScheduleArgs

Deprecated:

Operations on databricks_sql_query schedules are deprecated. Please use databricks_job resource to schedule a sql_task.

Tags List<string>
DataSourceId string
Query string
Description string
Name string
Parameters []SqlQueryParameterArgs
Parent string
RunAsRole string
Schedule SqlQueryScheduleArgs

Deprecated:

Operations on databricks_sql_query schedules are deprecated. Please use databricks_job resource to schedule a sql_task.

Tags []string
dataSourceId String
query String
description String
name String
parameters List<SqlQueryParameterArgs>
parent String
runAsRole String
schedule SqlQueryScheduleArgs

Deprecated:

Operations on databricks_sql_query schedules are deprecated. Please use databricks_job resource to schedule a sql_task.

tags List<String>
dataSourceId string
query string
description string
name string
parameters SqlQueryParameterArgs[]
parent string
runAsRole string
schedule SqlQueryScheduleArgs

Deprecated:

Operations on databricks_sql_query schedules are deprecated. Please use databricks_job resource to schedule a sql_task.

tags string[]
data_source_id str
query str
description str
name str
parameters Sequence[SqlQueryParameterArgs]
parent str
run_as_role str
schedule SqlQueryScheduleArgs

Deprecated:

Operations on databricks_sql_query schedules are deprecated. Please use databricks_job resource to schedule a sql_task.

tags Sequence[str]
dataSourceId String
query String
description String
name String
parameters List<Property Map>
parent String
runAsRole String
schedule Property Map

Deprecated:

Operations on databricks_sql_query schedules are deprecated. Please use databricks_job resource to schedule a sql_task.

tags List<String>

Outputs

All input properties are implicitly available as output properties. Additionally, the SqlQuery resource produces the following output properties:

Id string

The provider-assigned unique ID for this managed resource.

Id string

The provider-assigned unique ID for this managed resource.

id String

The provider-assigned unique ID for this managed resource.

id string

The provider-assigned unique ID for this managed resource.

id str

The provider-assigned unique ID for this managed resource.

id String

The provider-assigned unique ID for this managed resource.

Look up Existing SqlQuery Resource

Get an existing SqlQuery resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

public static get(name: string, id: Input<ID>, state?: SqlQueryState, opts?: CustomResourceOptions): SqlQuery
@staticmethod
def get(resource_name: str,
        id: str,
        opts: Optional[ResourceOptions] = None,
        data_source_id: Optional[str] = None,
        description: Optional[str] = None,
        name: Optional[str] = None,
        parameters: Optional[Sequence[SqlQueryParameterArgs]] = None,
        parent: Optional[str] = None,
        query: Optional[str] = None,
        run_as_role: Optional[str] = None,
        schedule: Optional[SqlQueryScheduleArgs] = None,
        tags: Optional[Sequence[str]] = None) -> SqlQuery
func GetSqlQuery(ctx *Context, name string, id IDInput, state *SqlQueryState, opts ...ResourceOption) (*SqlQuery, error)
public static SqlQuery Get(string name, Input<string> id, SqlQueryState? state, CustomResourceOptions? opts = null)
public static SqlQuery get(String name, Output<String> id, SqlQueryState state, CustomResourceOptions options)
Resource lookup is not supported in YAML
name
The unique name of the resulting resource.
id
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
resource_name
The unique name of the resulting resource.
id
The unique provider ID of the resource to lookup.
name
The unique name of the resulting resource.
id
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
name
The unique name of the resulting resource.
id
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
name
The unique name of the resulting resource.
id
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
The following state arguments are supported:
DataSourceId string
Description string
Name string
Parameters List<SqlQueryParameterArgs>
Parent string
Query string
RunAsRole string
Schedule SqlQueryScheduleArgs

Deprecated:

Operations on databricks_sql_query schedules are deprecated. Please use databricks_job resource to schedule a sql_task.

Tags List<string>
DataSourceId string
Description string
Name string
Parameters []SqlQueryParameterArgs
Parent string
Query string
RunAsRole string
Schedule SqlQueryScheduleArgs

Deprecated:

Operations on databricks_sql_query schedules are deprecated. Please use databricks_job resource to schedule a sql_task.

Tags []string
dataSourceId String
description String
name String
parameters List<SqlQueryParameterArgs>
parent String
query String
runAsRole String
schedule SqlQueryScheduleArgs

Deprecated:

Operations on databricks_sql_query schedules are deprecated. Please use databricks_job resource to schedule a sql_task.

tags List<String>
dataSourceId string
description string
name string
parameters SqlQueryParameterArgs[]
parent string
query string
runAsRole string
schedule SqlQueryScheduleArgs

Deprecated:

Operations on databricks_sql_query schedules are deprecated. Please use databricks_job resource to schedule a sql_task.

tags string[]
data_source_id str
description str
name str
parameters Sequence[SqlQueryParameterArgs]
parent str
query str
run_as_role str
schedule SqlQueryScheduleArgs

Deprecated:

Operations on databricks_sql_query schedules are deprecated. Please use databricks_job resource to schedule a sql_task.

tags Sequence[str]
dataSourceId String
description String
name String
parameters List<Property Map>
parent String
query String
runAsRole String
schedule Property Map

Deprecated:

Operations on databricks_sql_query schedules are deprecated. Please use databricks_job resource to schedule a sql_task.

tags List<String>

Supporting Types

SqlQueryParameter

SqlQueryParameterDate

Value string
Value string
value String
value string
value str
value String

SqlQueryParameterDateRange

SqlQueryParameterDateRangeRange

End string
Start string
End string
Start string
end String
start String
end string
start string
end str
start str
end String
start String

SqlQueryParameterDatetime

Value string
Value string
value String
value string
value str
value String

SqlQueryParameterDatetimeRange

SqlQueryParameterDatetimeRangeRange

End string
Start string
End string
Start string
end String
start String
end string
start string
end str
start str
end String
start String

SqlQueryParameterDatetimesec

Value string
Value string
value String
value string
value str
value String

SqlQueryParameterDatetimesecRange

SqlQueryParameterDatetimesecRangeRange

End string
Start string
End string
Start string
end String
start String
end string
start string
end str
start str
end String
start String

SqlQueryParameterEnum

options List<String>
multiple Property Map
value String
values List<String>

SqlQueryParameterEnumMultiple

Prefix string
Separator string
Suffix string
Prefix string
Separator string
Suffix string
prefix String
separator String
suffix String
prefix string
separator string
suffix string
prefix String
separator String
suffix String

SqlQueryParameterNumber

Value double
Value float64
value Double
value number
value float
value Number

SqlQueryParameterQuery

queryId String
multiple Property Map
value String
values List<String>

SqlQueryParameterQueryMultiple

Prefix string
Separator string
Suffix string
Prefix string
Separator string
Suffix string
prefix String
separator String
suffix String
prefix string
separator string
suffix string
prefix String
separator String
suffix String

SqlQueryParameterText

Value string
Value string
value String
value string
value str
value String

SqlQuerySchedule

SqlQueryScheduleContinuous

SqlQueryScheduleDaily

intervalDays Integer
timeOfDay String
untilDate String
intervalDays number
timeOfDay string
untilDate string
intervalDays Number
timeOfDay String
untilDate String

SqlQueryScheduleWeekly

dayOfWeek String
intervalWeeks Integer
timeOfDay String
untilDate String
dayOfWeek string
intervalWeeks number
timeOfDay string
untilDate string
dayOfWeek String
intervalWeeks Number
timeOfDay String
untilDate String

Import

You can import a databricks_sql_query resource with an ID like the following (bash):

 $ pulumi import databricks:index/sqlQuery:SqlQuery this <query-id>

Package Details

Repository
databricks pulumi/pulumi-databricks
License
Apache-2.0
Notes

This Pulumi package is based on the databricks Terraform Provider.