databricks.SqlQuery
To manage SQLA resources you must have databricks_sql_access
on your databricks.Group or databricks_user.
Note: documentation for this resource is a work in progress.
A query may have one or more visualizations.
Troubleshooting
In case you see Error: cannot create sql query: Internal Server Error during pulumi up, double-check that you are using the correct data_source_id.
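If the query is defined in the same program as the warehouse, you can avoid hard-coding the ID altogether. A minimal TypeScript sketch, using the endpoint's computed dataSourceId output (resource names here are illustrative placeholders):

import * as databricks from "@pulumi/databricks";

// Illustrative warehouse; in practice reference your existing SQL endpoint.
const example = new databricks.SqlEndpoint("example", {
    name: "Example endpoint",
    clusterSize: "Small",
});

const q = new databricks.SqlQuery("q", {
    // dataSourceId is a computed output of the endpoint and is
    // distinct from the endpoint's own resource id.
    dataSourceId: example.dataSourceId,
    query: "SELECT 1",
});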
Operations on databricks.SqlQuery schedules are ⛔️ deprecated. You can create, update, or delete a schedule for SQLA and other Databricks resources using the databricks.Job resource instead, as shown in the sketch below.
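A minimal TypeScript sketch of such a job, assuming q1 is the databricks.SqlQuery from the example below and example is an existing databricks.SqlEndpoint; the cron expression is an arbitrary placeholder:

import * as databricks from "@pulumi/databricks";

// Schedule the saved query through a job sql_task instead of the
// deprecated SqlQuery schedule block.
const nightly = new databricks.Job("nightly", {
    schedule: {
        quartzCronExpression: "0 0 7 * * ?", // daily at 07:00
        timezoneId: "UTC",
    },
    tasks: [{
        taskKey: "run_q1",
        sqlTask: {
            query: { queryId: q1.id },
            warehouseId: example.id, // the SQL warehouse to run on
        },
    }],
});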
Related Resources
The following resources are often used in the same context:
- End to end workspace management guide.
- databricks.SqlDashboard to manage Databricks SQL Dashboards.
- databricks.SqlEndpoint to manage Databricks SQL Endpoints.
- databricks.SqlGlobalConfig to configure the security policy, databricks_instance_profile, and data access properties for all databricks.SqlEndpoint resources in a workspace.
- databricks.SqlPermissions to manage data object access control lists in Databricks workspaces for things like tables, views, databases, and more.
- databricks.Job to schedule Databricks SQL queries (as well as dashboards and alerts) using Databricks Jobs.
Example Usage
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Databricks = Pulumi.Databricks;
return await Deployment.RunAsync(() =>
{
var sharedDir = new Databricks.Directory("sharedDir", new()
{
Path = "/Shared/Queries",
});
var q1 = new Databricks.SqlQuery("q1", new()
{
DataSourceId = databricks_sql_endpoint.Example.Data_source_id,
Query = @" SELECT {{ p1 }} AS p1
WHERE 1=1
AND p2 in ({{ p2 }})
AND event_date > date '{{ p3 }}'
",
Parent = sharedDir.ObjectId.Apply(objectId => $"folders/{objectId}"),
RunAsRole = "viewer",
Parameters = new[]
{
new Databricks.Inputs.SqlQueryParameterArgs
{
Name = "p1",
Title = "Title for p1",
Text = new Databricks.Inputs.SqlQueryParameterTextArgs
{
Value = "default",
},
},
new Databricks.Inputs.SqlQueryParameterArgs
{
Name = "p2",
Title = "Title for p2",
Enum = new Databricks.Inputs.SqlQueryParameterEnumArgs
{
Options = new[]
{
"default",
"foo",
"bar",
},
Value = "default",
Multiple = new Databricks.Inputs.SqlQueryParameterEnumMultipleArgs
{
Prefix = "\"",
Suffix = "\"",
Separator = ",",
},
},
},
new Databricks.Inputs.SqlQueryParameterArgs
{
Name = "p3",
Title = "Title for p3",
Date = new Databricks.Inputs.SqlQueryParameterDateArgs
{
Value = "2022-01-01",
},
},
},
Tags = new[]
{
"t1",
"t2",
},
});
});
package main
import (
"fmt"
"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
sharedDir, err := databricks.NewDirectory(ctx, "sharedDir", &databricks.DirectoryArgs{
Path: pulumi.String("/Shared/Queries"),
})
if err != nil {
return err
}
_, err = databricks.NewSqlQuery(ctx, "q1", &databricks.SqlQueryArgs{
DataSourceId: pulumi.Any(databricks_sql_endpoint.Example.Data_source_id),
Query: pulumi.String(" SELECT {{ p1 }} AS p1\n WHERE 1=1\n AND p2 in ({{ p2 }})\n AND event_date > date '{{ p3 }}'\n"),
Parent: sharedDir.ObjectId.ApplyT(func(objectId int) (string, error) {
return fmt.Sprintf("folders/%v", objectId), nil
}).(pulumi.StringOutput),
RunAsRole: pulumi.String("viewer"),
Parameters: databricks.SqlQueryParameterArray{
&databricks.SqlQueryParameterArgs{
Name: pulumi.String("p1"),
Title: pulumi.String("Title for p1"),
Text: &databricks.SqlQueryParameterTextArgs{
Value: pulumi.String("default"),
},
},
&databricks.SqlQueryParameterArgs{
Name: pulumi.String("p2"),
Title: pulumi.String("Title for p2"),
Enum: &databricks.SqlQueryParameterEnumArgs{
Options: pulumi.StringArray{
pulumi.String("default"),
pulumi.String("foo"),
pulumi.String("bar"),
},
Value: pulumi.String("default"),
Multiple: &databricks.SqlQueryParameterEnumMultipleArgs{
Prefix: pulumi.String("\""),
Suffix: pulumi.String("\""),
Separator: pulumi.String(","),
},
},
},
&databricks.SqlQueryParameterArgs{
Name: pulumi.String("p3"),
Title: pulumi.String("Title for p3"),
Date: &databricks.SqlQueryParameterDateArgs{
Value: pulumi.String("2022-01-01"),
},
},
},
Tags: pulumi.StringArray{
pulumi.String("t1"),
pulumi.String("t2"),
},
})
if err != nil {
return err
}
return nil
})
}
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.databricks.Directory;
import com.pulumi.databricks.DirectoryArgs;
import com.pulumi.databricks.SqlQuery;
import com.pulumi.databricks.SqlQueryArgs;
import com.pulumi.databricks.inputs.SqlQueryParameterArgs;
import com.pulumi.databricks.inputs.SqlQueryParameterTextArgs;
import com.pulumi.databricks.inputs.SqlQueryParameterEnumArgs;
import com.pulumi.databricks.inputs.SqlQueryParameterEnumMultipleArgs;
import com.pulumi.databricks.inputs.SqlQueryParameterDateArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var sharedDir = new Directory("sharedDir", DirectoryArgs.builder()
.path("/Shared/Queries")
.build());
var q1 = new SqlQuery("q1", SqlQueryArgs.builder()
.dataSourceId(databricks_sql_endpoint.example().data_source_id())
.query("""
SELECT {{ p1 }} AS p1
WHERE 1=1
AND p2 in ({{ p2 }})
AND event_date > date '{{ p3 }}'
""")
.parent(sharedDir.objectId().applyValue(objectId -> String.format("folders/%s", objectId)))
.runAsRole("viewer")
.parameters(
SqlQueryParameterArgs.builder()
.name("p1")
.title("Title for p1")
.text(SqlQueryParameterTextArgs.builder()
.value("default")
.build())
.build(),
SqlQueryParameterArgs.builder()
.name("p2")
.title("Title for p2")
.enum_(SqlQueryParameterEnumArgs.builder()
.options(
"default",
"foo",
"bar")
.value("default")
.multiple(SqlQueryParameterEnumMultipleArgs.builder()
.prefix("\"")
.suffix("\"")
.separator(",")
.build())
.build())
.build(),
SqlQueryParameterArgs.builder()
.name("p3")
.title("Title for p3")
.date(SqlQueryParameterDateArgs.builder()
.value("2022-01-01")
.build())
.build())
.tags(
"t1",
"t2")
.build());
}
}
import pulumi
import pulumi_databricks as databricks
shared_dir = databricks.Directory("sharedDir", path="/Shared/Queries")
q1 = databricks.SqlQuery("q1",
data_source_id=databricks_sql_endpoint["example"]["data_source_id"],
query=""" SELECT {{ p1 }} AS p1
WHERE 1=1
AND p2 in ({{ p2 }})
AND event_date > date '{{ p3 }}'
""",
parent=shared_dir.object_id.apply(lambda object_id: f"folders/{object_id}"),
run_as_role="viewer",
parameters=[
databricks.SqlQueryParameterArgs(
name="p1",
title="Title for p1",
text=databricks.SqlQueryParameterTextArgs(
value="default",
),
),
databricks.SqlQueryParameterArgs(
name="p2",
title="Title for p2",
enum=databricks.SqlQueryParameterEnumArgs(
options=[
"default",
"foo",
"bar",
],
value="default",
multiple=databricks.SqlQueryParameterEnumMultipleArgs(
prefix="\"",
suffix="\"",
separator=",",
),
),
),
databricks.SqlQueryParameterArgs(
name="p3",
title="Title for p3",
date=databricks.SqlQueryParameterDateArgs(
value="2022-01-01",
),
),
],
tags=[
"t1",
"t2",
])
import * as pulumi from "@pulumi/pulumi";
import * as databricks from "@pulumi/databricks";
const sharedDir = new databricks.Directory("sharedDir", {path: "/Shared/Queries"});
const q1 = new databricks.SqlQuery("q1", {
dataSourceId: databricks_sql_endpoint.example.data_source_id,
query: ` SELECT {{ p1 }} AS p1
WHERE 1=1
AND p2 in ({{ p2 }})
AND event_date > date '{{ p3 }}'
`,
parent: pulumi.interpolate`folders/${sharedDir.objectId}`,
runAsRole: "viewer",
parameters: [
{
name: "p1",
title: "Title for p1",
text: {
value: "default",
},
},
{
name: "p2",
title: "Title for p2",
"enum": {
options: [
"default",
"foo",
"bar",
],
value: "default",
multiple: {
prefix: "\"",
suffix: "\"",
separator: ",",
},
},
},
{
name: "p3",
title: "Title for p3",
date: {
value: "2022-01-01",
},
},
],
tags: [
"t1",
"t2",
],
});
resources:
sharedDir:
type: databricks:Directory
properties:
path: /Shared/Queries
q1:
type: databricks:SqlQuery
properties:
dataSourceId: ${databricks_sql_endpoint.example.data_source_id}
query: |2
SELECT {{ p1 }} AS p1
WHERE 1=1
AND p2 in ({{ p2 }})
AND event_date > date '{{ p3 }}'
parent: folders/${sharedDir.objectId}
runAsRole: viewer
parameters:
- name: p1
title: Title for p1
text:
value: default
- name: p2
title: Title for p2
enum:
options:
- default
- foo
- bar
value: default
multiple:
prefix: '"'
suffix: '"'
separator: ','
- name: p3
title: Title for p3
date:
value: 2022-01-01
tags:
- t1
- t2
Example permission to share a query with all users
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Databricks = Pulumi.Databricks;
return await Deployment.RunAsync(() =>
{
var q1 = new Databricks.Permissions("q1", new()
{
SqlQueryId = databricks_sql_query.Q1.Id,
AccessControls = new[]
{
new Databricks.Inputs.PermissionsAccessControlArgs
{
GroupName = data.Databricks_group.Users.Display_name,
PermissionLevel = "CAN_RUN",
},
new Databricks.Inputs.PermissionsAccessControlArgs
{
GroupName = data.Databricks_group.Team.Display_name,
PermissionLevel = "CAN_EDIT",
},
},
});
});
package main
import (
"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
_, err := databricks.NewPermissions(ctx, "q1", &databricks.PermissionsArgs{
SqlQueryId: pulumi.Any(databricks_sql_query.Q1.Id),
AccessControls: databricks.PermissionsAccessControlArray{
&databricks.PermissionsAccessControlArgs{
GroupName: pulumi.Any(data.Databricks_group.Users.Display_name),
PermissionLevel: pulumi.String("CAN_RUN"),
},
&databricks.PermissionsAccessControlArgs{
GroupName: pulumi.Any(data.Databricks_group.Team.Display_name),
PermissionLevel: pulumi.String("CAN_EDIT"),
},
},
})
if err != nil {
return err
}
return nil
})
}
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.databricks.Permissions;
import com.pulumi.databricks.PermissionsArgs;
import com.pulumi.databricks.inputs.PermissionsAccessControlArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var q1 = new Permissions("q1", PermissionsArgs.builder()
.sqlQueryId(databricks_sql_query.q1().id())
.accessControls(
PermissionsAccessControlArgs.builder()
.groupName(data.databricks_group().users().display_name())
.permissionLevel("CAN_RUN")
.build(),
PermissionsAccessControlArgs.builder()
.groupName(data.databricks_group().team().display_name())
.permissionLevel("CAN_EDIT")
.build())
.build());
}
}
import pulumi
import pulumi_databricks as databricks
q1 = databricks.Permissions("q1",
sql_query_id=databricks_sql_query["q1"]["id"],
access_controls=[
databricks.PermissionsAccessControlArgs(
group_name=data["databricks_group"]["users"]["display_name"],
permission_level="CAN_RUN",
),
databricks.PermissionsAccessControlArgs(
group_name=data["databricks_group"]["team"]["display_name"],
permission_level="CAN_EDIT",
),
])
import * as pulumi from "@pulumi/pulumi";
import * as databricks from "@pulumi/databricks";
const q1 = new databricks.Permissions("q1", {
sqlQueryId: databricks_sql_query.q1.id,
accessControls: [
{
groupName: data.databricks_group.users.display_name,
permissionLevel: "CAN_RUN",
},
{
groupName: data.databricks_group.team.display_name,
permissionLevel: "CAN_EDIT",
},
],
});
resources:
q1:
type: databricks:Permissions
properties:
sqlQueryId: ${databricks_sql_query.q1.id}
accessControls:
- groupName: ${data.databricks_group.users.display_name}
permissionLevel: CAN_RUN
- groupName: ${data.databricks_group.team.display_name}
permissionLevel: CAN_EDIT
Create SqlQuery Resource
new SqlQuery(name: string, args: SqlQueryArgs, opts?: CustomResourceOptions);
@overload
def SqlQuery(resource_name: str,
opts: Optional[ResourceOptions] = None,
data_source_id: Optional[str] = None,
description: Optional[str] = None,
name: Optional[str] = None,
parameters: Optional[Sequence[SqlQueryParameterArgs]] = None,
parent: Optional[str] = None,
query: Optional[str] = None,
run_as_role: Optional[str] = None,
schedule: Optional[SqlQueryScheduleArgs] = None,
tags: Optional[Sequence[str]] = None)
@overload
def SqlQuery(resource_name: str,
args: SqlQueryArgs,
opts: Optional[ResourceOptions] = None)
func NewSqlQuery(ctx *Context, name string, args SqlQueryArgs, opts ...ResourceOption) (*SqlQuery, error)
public SqlQuery(string name, SqlQueryArgs args, CustomResourceOptions? opts = null)
public SqlQuery(String name, SqlQueryArgs args)
public SqlQuery(String name, SqlQueryArgs args, CustomResourceOptions options)
type: databricks:SqlQuery
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
- name / resource_name
- The unique name of the resource.
- args (SqlQueryArgs)
- The arguments to resource properties.
- opts / options
- Bag of options to control the resource's behavior (CustomResourceOptions, ResourceOptions, or ResourceOption, depending on the SDK).
- ctx (Context, Go only)
- Context object for the current deployment.
SqlQuery Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
The SqlQuery resource accepts the following input properties (names are shown in camelCase; each SDK applies its own casing, e.g. data_source_id in Python):
- dataSourceId (string)
- query (string)
- description (string)
- name (string)
- parameters (List<SqlQueryParameter>)
- parent (string)
- runAsRole (string)
- schedule (SqlQuerySchedule): Operations on databricks_sql_query schedules are deprecated. Please use the databricks_job resource to schedule a sql_task.
- tags (List<string>)
Outputs
All input properties are implicitly available as output properties. Additionally, the SqlQuery resource produces the following output properties:
- id (string)
- The provider-assigned unique ID for this managed resource.
Look up Existing SqlQuery Resource
Get an existing SqlQuery resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: SqlQueryState, opts?: CustomResourceOptions): SqlQuery
@staticmethod
def get(resource_name: str,
id: str,
opts: Optional[ResourceOptions] = None,
data_source_id: Optional[str] = None,
description: Optional[str] = None,
name: Optional[str] = None,
parameters: Optional[Sequence[SqlQueryParameterArgs]] = None,
parent: Optional[str] = None,
query: Optional[str] = None,
run_as_role: Optional[str] = None,
schedule: Optional[SqlQueryScheduleArgs] = None,
tags: Optional[Sequence[str]] = None) -> SqlQuery
func GetSqlQuery(ctx *Context, name string, id IDInput, state *SqlQueryState, opts ...ResourceOption) (*SqlQuery, error)
public static SqlQuery Get(string name, Input<string> id, SqlQueryState? state, CustomResourceOptions? opts = null)
public static SqlQuery get(String name, Output<String> id, SqlQueryState state, CustomResourceOptions options)
Resource lookup is not supported in YAML
- name / resource_name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to look up.
- state
- Any extra arguments used during the lookup.
- opts / options
- A bag of options that control this resource's behavior.
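For example, a minimal TypeScript sketch that adopts the state of an existing query by its ID (the ID value is a placeholder):

import * as databricks from "@pulumi/databricks";

// Look up an existing query without recreating it.
const existing = databricks.SqlQuery.get("existing", "<query-id>");
export const existingName = existing.name;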
The following state properties are supported (same casing conventions as Inputs):
- dataSourceId (string)
- description (string)
- name (string)
- parameters (List<SqlQueryParameter>)
- parent (string)
- query (string)
- runAsRole (string)
- schedule (SqlQuerySchedule): Operations on databricks_sql_query schedules are deprecated. Please use the databricks_job resource to schedule a sql_task.
- tags (List<string>)
Supporting Types
SqlQueryParameter
- name (string)
- date (SqlQueryParameterDate)
- dateRange (SqlQueryParameterDateRange)
- datetime (SqlQueryParameterDatetime)
- datetimeRange (SqlQueryParameterDatetimeRange)
- datetimesec (SqlQueryParameterDatetimesec)
- datetimesecRange (SqlQueryParameterDatetimesecRange)
- enum (SqlQueryParameterEnum)
- number (SqlQueryParameterNumber)
- query (SqlQueryParameterQuery)
- text (SqlQueryParameterText)
- title (string)
SqlQueryParameterDate
- value (string)
SqlQueryParameterDateRange
- range (SqlQueryParameterDateRangeRange)
- value (string)
SqlQueryParameterDateRangeRange
SqlQueryParameterDatetime
- value (string)
SqlQueryParameterDatetimeRange
- range (SqlQueryParameterDatetimeRangeRange)
- value (string)
SqlQueryParameterDatetimeRangeRange
SqlQueryParameterDatetimesec
- value (string)
SqlQueryParameterDatetimesecRange
- range (SqlQueryParameterDatetimesecRangeRange)
- value (string)
SqlQueryParameterDatetimesecRangeRange
SqlQueryParameterEnum
- options (List<string>)
- multiple (SqlQueryParameterEnumMultiple)
- value (string)
- values (List<string>)
SqlQueryParameterEnumMultiple
- prefix (string)
- separator (string)
- suffix (string)
SqlQueryParameterNumber
- value (double)
SqlQueryParameterQuery
- queryId (string)
- multiple (SqlQueryParameterQueryMultiple)
- value (string)
- values (List<string>)
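For illustration, a hedged TypeScript sketch of a query-backed dropdown parameter, assuming optionsQuery is another databricks.SqlQuery whose result set supplies the available choices:

import * as databricks from "@pulumi/databricks";

// The dropdown options for "cat" come from the results of another
// saved query (optionsQuery, assumed to be defined elsewhere).
const q2 = new databricks.SqlQuery("q2", {
    dataSourceId: optionsQuery.dataSourceId,
    query: "SELECT * FROM events WHERE category = '{{ cat }}'",
    parameters: [{
        name: "cat",
        title: "Category",
        query: { queryId: optionsQuery.id },
    }],
});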
SqlQueryParameterQueryMultiple
SqlQueryParameterText
- value (string)
SqlQuerySchedule
- continuous (SqlQueryScheduleContinuous)
- daily (SqlQueryScheduleDaily)
- weekly (SqlQueryScheduleWeekly)
SqlQueryScheduleContinuous
- intervalSeconds (int)
- untilDate (string)
SqlQueryScheduleDaily
- intervalDays (int)
- timeOfDay (string)
- untilDate (string)
SqlQueryScheduleWeekly
- dayOfWeek (string)
- intervalWeeks (int)
- timeOfDay (string)
- untilDate (string)
Import
You can import a databricks_sql_query resource by its query ID, for example:
$ pulumi import databricks:index/sqlQuery:SqlQuery this <query-id>
Package Details
- Repository
- databricks pulumi/pulumi-databricks
- License
- Apache-2.0
- Notes
This Pulumi package is based on the databricks Terraform Provider.