flexibleengine.DliSqlJob
Manages a DLI SQL job resource within FlexibleEngine.
Example Usage
Create a SQL job
import * as pulumi from "@pulumi/pulumi";
import * as flexibleengine from "@pulumi/flexibleengine";
const config = new pulumi.Config();
const databaseName = config.requireObject("databaseName");
const queueName = config.requireObject("queueName");
const sql = config.requireObject("sql");
const test = new flexibleengine.DliSqlJob("test", {
sql: sql,
databaseName: databaseName,
queueName: queueName,
});
import pulumi
import pulumi_flexibleengine as flexibleengine
config = pulumi.Config()
database_name = config.require_object("databaseName")
queue_name = config.require_object("queueName")
sql = config.require_object("sql")
test = flexibleengine.DliSqlJob("test",
sql=sql,
database_name=database_name,
queue_name=queue_name)
package main
import (
"github.com/pulumi/pulumi-terraform-provider/sdks/go/flexibleengine/flexibleengine"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi/config"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
cfg := config.New(ctx, "")
databaseName := cfg.RequireObject("databaseName")
queueName := cfg.RequireObject("queueName")
sql := cfg.RequireObject("sql")
_, err := flexibleengine.NewDliSqlJob(ctx, "test", &flexibleengine.DliSqlJobArgs{
Sql: pulumi.Any(sql),
DatabaseName: pulumi.Any(databaseName),
QueueName: pulumi.Any(queueName),
})
if err != nil {
return err
}
return nil
})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Flexibleengine = Pulumi.Flexibleengine;
return await Deployment.RunAsync(() =>
{
var config = new Config();
var databaseName = config.RequireObject<dynamic>("databaseName");
var queueName = config.RequireObject<dynamic>("queueName");
var sql = config.RequireObject<dynamic>("sql");
var test = new Flexibleengine.DliSqlJob("test", new()
{
Sql = sql,
DatabaseName = databaseName,
QueueName = queueName,
});
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.flexibleengine.DliSqlJob;
import com.pulumi.flexibleengine.DliSqlJobArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
final var config = ctx.config();
final var databaseName = config.get("databaseName");
final var queueName = config.get("queueName");
final var sql = config.get("sql");
var test = new DliSqlJob("test", DliSqlJobArgs.builder()
.sql(sql)
.databaseName(databaseName)
.queueName(queueName)
.build());
}
}
configuration:
databaseName:
type: dynamic
queueName:
type: dynamic
sql:
type: dynamic
resources:
test:
type: flexibleengine:DliSqlJob
properties:
sql: ${sql}
databaseName: ${databaseName}
queueName: ${queueName}
Create DliSqlJob Resource
Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.
Constructor syntax
new DliSqlJob(name: string, args: DliSqlJobArgs, opts?: CustomResourceOptions);
@overload
def DliSqlJob(resource_name: str,
args: DliSqlJobArgs,
opts: Optional[ResourceOptions] = None)
@overload
def DliSqlJob(resource_name: str,
opts: Optional[ResourceOptions] = None,
sql: Optional[str] = None,
conf: Optional[DliSqlJobConfArgs] = None,
database_name: Optional[str] = None,
dli_sql_job_id: Optional[str] = None,
queue_name: Optional[str] = None,
region: Optional[str] = None,
tags: Optional[Mapping[str, str]] = None,
timeouts: Optional[DliSqlJobTimeoutsArgs] = None)
func NewDliSqlJob(ctx *Context, name string, args DliSqlJobArgs, opts ...ResourceOption) (*DliSqlJob, error)
public DliSqlJob(string name, DliSqlJobArgs args, CustomResourceOptions? opts = null)
public DliSqlJob(String name, DliSqlJobArgs args)
public DliSqlJob(String name, DliSqlJobArgs args, CustomResourceOptions options)
type: flexibleengine:DliSqlJob
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
Parameters
- name string
- The unique name of the resource.
- args DliSqlJobArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- resource_name str
- The unique name of the resource.
- args DliSqlJobArgs
- The arguments to resource properties.
- opts ResourceOptions
- Bag of options to control resource's behavior.
- ctx Context
- Context object for the current deployment.
- name string
- The unique name of the resource.
- args DliSqlJobArgs
- The arguments to resource properties.
- opts ResourceOption
- Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args DliSqlJobArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- name String
- The unique name of the resource.
- args DliSqlJobArgs
- The arguments to resource properties.
- options CustomResourceOptions
- Bag of options to control resource's behavior.
Constructor example
The following reference example uses placeholder values for all input properties.
var dliSqlJobResource = new Flexibleengine.DliSqlJob("dliSqlJobResource", new()
{
Sql = "string",
Conf = new Flexibleengine.Inputs.DliSqlJobConfArgs
{
DliSqlJobTimeout = 0,
DliSqlSqlasyncEnabled = false,
SparkSqlAutoBroadcastJoinThreshold = 0,
SparkSqlBadRecordsPath = "string",
SparkSqlDynamicPartitionOverwriteEnabled = false,
SparkSqlFilesMaxPartitionBytes = 0,
SparkSqlMaxRecordsPerFile = 0,
SparkSqlShufflePartitions = 0,
},
DatabaseName = "string",
DliSqlJobId = "string",
QueueName = "string",
Region = "string",
Tags =
{
{ "string", "string" },
},
Timeouts = new Flexibleengine.Inputs.DliSqlJobTimeoutsArgs
{
Create = "string",
Delete = "string",
},
});
example, err := flexibleengine.NewDliSqlJob(ctx, "dliSqlJobResource", &flexibleengine.DliSqlJobArgs{
Sql: pulumi.String("string"),
Conf: &flexibleengine.DliSqlJobConfArgs{
DliSqlJobTimeout: pulumi.Float64(0),
DliSqlSqlasyncEnabled: pulumi.Bool(false),
SparkSqlAutoBroadcastJoinThreshold: pulumi.Float64(0),
SparkSqlBadRecordsPath: pulumi.String("string"),
SparkSqlDynamicPartitionOverwriteEnabled: pulumi.Bool(false),
SparkSqlFilesMaxPartitionBytes: pulumi.Float64(0),
SparkSqlMaxRecordsPerFile: pulumi.Float64(0),
SparkSqlShufflePartitions: pulumi.Float64(0),
},
DatabaseName: pulumi.String("string"),
DliSqlJobId: pulumi.String("string"),
QueueName: pulumi.String("string"),
Region: pulumi.String("string"),
Tags: pulumi.StringMap{
"string": pulumi.String("string"),
},
Timeouts: &flexibleengine.DliSqlJobTimeoutsArgs{
Create: pulumi.String("string"),
Delete: pulumi.String("string"),
},
})
var dliSqlJobResource = new DliSqlJob("dliSqlJobResource", DliSqlJobArgs.builder()
.sql("string")
.conf(DliSqlJobConfArgs.builder()
.dliSqlJobTimeout(0)
.dliSqlSqlasyncEnabled(false)
.sparkSqlAutoBroadcastJoinThreshold(0)
.sparkSqlBadRecordsPath("string")
.sparkSqlDynamicPartitionOverwriteEnabled(false)
.sparkSqlFilesMaxPartitionBytes(0)
.sparkSqlMaxRecordsPerFile(0)
.sparkSqlShufflePartitions(0)
.build())
.databaseName("string")
.dliSqlJobId("string")
.queueName("string")
.region("string")
.tags(Map.of("string", "string"))
.timeouts(DliSqlJobTimeoutsArgs.builder()
.create("string")
.delete("string")
.build())
.build());
dli_sql_job_resource = flexibleengine.DliSqlJob("dliSqlJobResource",
sql="string",
conf={
"dli_sql_job_timeout": 0,
"dli_sql_sqlasync_enabled": False,
"spark_sql_auto_broadcast_join_threshold": 0,
"spark_sql_bad_records_path": "string",
"spark_sql_dynamic_partition_overwrite_enabled": False,
"spark_sql_files_max_partition_bytes": 0,
"spark_sql_max_records_per_file": 0,
"spark_sql_shuffle_partitions": 0,
},
database_name="string",
dli_sql_job_id="string",
queue_name="string",
region="string",
tags={
"string": "string",
},
timeouts={
"create": "string",
"delete": "string",
})
const dliSqlJobResource = new flexibleengine.DliSqlJob("dliSqlJobResource", {
sql: "string",
conf: {
dliSqlJobTimeout: 0,
dliSqlSqlasyncEnabled: false,
sparkSqlAutoBroadcastJoinThreshold: 0,
sparkSqlBadRecordsPath: "string",
sparkSqlDynamicPartitionOverwriteEnabled: false,
sparkSqlFilesMaxPartitionBytes: 0,
sparkSqlMaxRecordsPerFile: 0,
sparkSqlShufflePartitions: 0,
},
databaseName: "string",
dliSqlJobId: "string",
queueName: "string",
region: "string",
tags: {
string: "string",
},
timeouts: {
create: "string",
"delete": "string",
},
});
type: flexibleengine:DliSqlJob
properties:
conf:
dliSqlJobTimeout: 0
dliSqlSqlasyncEnabled: false
sparkSqlAutoBroadcastJoinThreshold: 0
sparkSqlBadRecordsPath: string
sparkSqlDynamicPartitionOverwriteEnabled: false
sparkSqlFilesMaxPartitionBytes: 0
sparkSqlMaxRecordsPerFile: 0
sparkSqlShufflePartitions: 0
databaseName: string
dliSqlJobId: string
queueName: string
region: string
sql: string
tags:
string: string
timeouts:
create: string
delete: string
DliSqlJob Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
The DliSqlJob resource accepts the following input properties:
- Sql string
- Specifies the SQL statement to be executed. Changing this parameter will create a new resource.
- Conf DliSqlJobConf
- Specifies the configuration parameters for the SQL job. Changing this parameter will create a new resource. The conf object structure is documented below.
- DatabaseName string
- Specifies the database where the SQL is executed. This argument does not need to be configured during database creation. Changing this parameter will create a new resource.
- DliSqlJobId string
- Indicates a resource ID in UUID format.
- QueueName string
- Specifies the queue to which the job is submitted. Changing this parameter will create a new resource.
- Region string
- Specifies the region in which to create the DLI SQL job resource. If omitted, the provider-level region will be used. Changing this parameter will create a new resource.
- Tags Dictionary<string, string>
- Specifies the labels of the job. Changing this parameter will create a new resource.
- Timeouts DliSqlJobTimeouts
- Sql string
- Specifies the SQL statement to be executed. Changing this parameter will create a new resource.
- Conf DliSqlJobConfArgs
- Specifies the configuration parameters for the SQL job. Changing this parameter will create a new resource. The conf object structure is documented below.
- DatabaseName string
- Specifies the database where the SQL is executed. This argument does not need to be configured during database creation. Changing this parameter will create a new resource.
- DliSqlJobId string
- Indicates a resource ID in UUID format.
- QueueName string
- Specifies the queue to which the job is submitted. Changing this parameter will create a new resource.
- Region string
- Specifies the region in which to create the DLI SQL job resource. If omitted, the provider-level region will be used. Changing this parameter will create a new resource.
- Tags map[string]string
- Specifies the labels of the job. Changing this parameter will create a new resource.
- Timeouts DliSqlJobTimeoutsArgs
- sql String
- Specifies the SQL statement to be executed. Changing this parameter will create a new resource.
- conf DliSqlJobConf
- Specifies the configuration parameters for the SQL job. Changing this parameter will create a new resource. The conf object structure is documented below.
- databaseName String
- Specifies the database where the SQL is executed. This argument does not need to be configured during database creation. Changing this parameter will create a new resource.
- dliSqlJobId String
- Indicates a resource ID in UUID format.
- queueName String
- Specifies the queue to which the job is submitted. Changing this parameter will create a new resource.
- region String
- Specifies the region in which to create the DLI SQL job resource. If omitted, the provider-level region will be used. Changing this parameter will create a new resource.
- tags Map<String,String>
- Specifies the labels of the job. Changing this parameter will create a new resource.
- timeouts DliSqlJobTimeouts
- sql string
- Specifies the SQL statement to be executed. Changing this parameter will create a new resource.
- conf DliSqlJobConf
- Specifies the configuration parameters for the SQL job. Changing this parameter will create a new resource. The conf object structure is documented below.
- databaseName string
- Specifies the database where the SQL is executed. This argument does not need to be configured during database creation. Changing this parameter will create a new resource.
- dliSqlJobId string
- Indicates a resource ID in UUID format.
- queueName string
- Specifies the queue to which the job is submitted. Changing this parameter will create a new resource.
- region string
- Specifies the region in which to create the DLI SQL job resource. If omitted, the provider-level region will be used. Changing this parameter will create a new resource.
- tags {[key: string]: string}
- Specifies the labels of the job. Changing this parameter will create a new resource.
- timeouts DliSqlJobTimeouts
- sql str
- Specifies the SQL statement to be executed. Changing this parameter will create a new resource.
- conf DliSqlJobConfArgs
- Specifies the configuration parameters for the SQL job. Changing this parameter will create a new resource. The conf object structure is documented below.
- database_name str
- Specifies the database where the SQL is executed. This argument does not need to be configured during database creation. Changing this parameter will create a new resource.
- dli_sql_job_id str
- Indicates a resource ID in UUID format.
- queue_name str
- Specifies the queue to which the job is submitted. Changing this parameter will create a new resource.
- region str
- Specifies the region in which to create the DLI SQL job resource. If omitted, the provider-level region will be used. Changing this parameter will create a new resource.
- tags Mapping[str, str]
- Specifies the labels of the job. Changing this parameter will create a new resource.
- timeouts DliSqlJobTimeoutsArgs
- sql String
- Specifies the SQL statement to be executed. Changing this parameter will create a new resource.
- conf Property Map
- Specifies the configuration parameters for the SQL job. Changing this parameter will create a new resource. The conf object structure is documented below.
- databaseName String
- Specifies the database where the SQL is executed. This argument does not need to be configured during database creation. Changing this parameter will create a new resource.
- dliSqlJobId String
- Indicates a resource ID in UUID format.
- queueName String
- Specifies the queue to which the job is submitted. Changing this parameter will create a new resource.
- region String
- Specifies the region in which to create the DLI SQL job resource. If omitted, the provider-level region will be used. Changing this parameter will create a new resource.
- tags Map<String>
- Specifies the labels of the job. Changing this parameter will create a new resource.
- timeouts Property Map
Outputs
All input properties are implicitly available as output properties. Additionally, the DliSqlJob resource produces the following output properties:
- Duration double
- Job running duration (unit: millisecond).
- Id string
- The provider-assigned unique ID for this managed resource.
- JobType string
- The type of the job, which can be DDL, DCL, IMPORT, EXPORT, QUERY, or INSERT.
- Owner string
- User who submits a job.
- Rows List<ImmutableArray<string>>
- When the statement type is DDL, results of the DDL are displayed.
- Schemas List<ImmutableDictionary<string, string>>
- When the statement type is DDL, the column name and type of DDL are displayed.
- StartTime string
- Time when a job is started, in RFC 3339 format, e.g. 2019-10-12T07:20:50.52Z.
- Status string
- The status of the job, which can be RUNNING, SCALING, LAUNCHING, FINISHED, FAILED, or CANCELED.
- Duration float64
- Job running duration (unit: millisecond).
- Id string
- The provider-assigned unique ID for this managed resource.
- JobType string
- The type of the job, which can be DDL, DCL, IMPORT, EXPORT, QUERY, or INSERT.
- Owner string
- User who submits a job.
- Rows [][]string
- When the statement type is DDL, results of the DDL are displayed.
- Schemas []map[string]string
- When the statement type is DDL, the column name and type of DDL are displayed.
- StartTime string
- Time when a job is started, in RFC 3339 format, e.g. 2019-10-12T07:20:50.52Z.
- Status string
- The status of the job, which can be RUNNING, SCALING, LAUNCHING, FINISHED, FAILED, or CANCELED.
- duration Double
- Job running duration (unit: millisecond).
- id String
- The provider-assigned unique ID for this managed resource.
- jobType String
- The type of the job, which can be DDL, DCL, IMPORT, EXPORT, QUERY, or INSERT.
- owner String
- User who submits a job.
- rows List<List<String>>
- When the statement type is DDL, results of the DDL are displayed.
- schemas List<Map<String,String>>
- When the statement type is DDL, the column name and type of DDL are displayed.
- startTime String
- Time when a job is started, in RFC 3339 format, e.g. 2019-10-12T07:20:50.52Z.
- status String
- The status of the job, which can be RUNNING, SCALING, LAUNCHING, FINISHED, FAILED, or CANCELED.
- duration number
- Job running duration (unit: millisecond).
- id string
- The provider-assigned unique ID for this managed resource.
- jobType string
- The type of the job, which can be DDL, DCL, IMPORT, EXPORT, QUERY, or INSERT.
- owner string
- User who submits a job.
- rows string[][]
- When the statement type is DDL, results of the DDL are displayed.
- schemas {[key: string]: string}[]
- When the statement type is DDL, the column name and type of DDL are displayed.
- startTime string
- Time when a job is started, in RFC 3339 format, e.g. 2019-10-12T07:20:50.52Z.
- status string
- The status of the job, which can be RUNNING, SCALING, LAUNCHING, FINISHED, FAILED, or CANCELED.
- duration float
- Job running duration (unit: millisecond).
- id str
- The provider-assigned unique ID for this managed resource.
- job_type str
- The type of the job, which can be DDL, DCL, IMPORT, EXPORT, QUERY, or INSERT.
- owner str
- User who submits a job.
- rows Sequence[Sequence[str]]
- When the statement type is DDL, results of the DDL are displayed.
- schemas Sequence[Mapping[str, str]]
- When the statement type is DDL, the column name and type of DDL are displayed.
- start_time str
- Time when a job is started, in RFC 3339 format, e.g. 2019-10-12T07:20:50.52Z.
- status str
- The status of the job, which can be RUNNING, SCALING, LAUNCHING, FINISHED, FAILED, or CANCELED.
- duration Number
- Job running duration (unit: millisecond).
- id String
- The provider-assigned unique ID for this managed resource.
- jobType String
- The type of the job, which can be DDL, DCL, IMPORT, EXPORT, QUERY, or INSERT.
- owner String
- User who submits a job.
- rows List<List<String>>
- When the statement type is DDL, results of the DDL are displayed.
- schemas List<Map<String>>
- When the statement type is DDL, the column name and type of DDL are displayed.
- startTime String
- Time when a job is started, in RFC 3339 format, e.g. 2019-10-12T07:20:50.52Z.
- status String
- The status of the job, which can be RUNNING, SCALING, LAUNCHING, FINISHED, FAILED, or CANCELED.
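As a quick illustration of consuming these outputs, here is a minimal TypeScript sketch (the database, queue, and SQL text are placeholder values) that exports the job's status and running duration as stack outputs:
import * as flexibleengine from "@pulumi/flexibleengine";

// Placeholder job definition; the exports below are the point of this sketch.
const job = new flexibleengine.DliSqlJob("example", {
    sql: "SELECT 1",
    databaseName: "example_db",
    queueName: "example_queue",
});

// Output properties are resolved once the job has been created.
export const jobStatus = job.status;
export const jobDurationMs = job.duration;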
Look up Existing DliSqlJob Resource
Get an existing DliSqlJob resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: DliSqlJobState, opts?: CustomResourceOptions): DliSqlJob
@staticmethod
def get(resource_name: str,
id: str,
opts: Optional[ResourceOptions] = None,
conf: Optional[DliSqlJobConfArgs] = None,
database_name: Optional[str] = None,
dli_sql_job_id: Optional[str] = None,
duration: Optional[float] = None,
job_type: Optional[str] = None,
owner: Optional[str] = None,
queue_name: Optional[str] = None,
region: Optional[str] = None,
rows: Optional[Sequence[Sequence[str]]] = None,
schemas: Optional[Sequence[Mapping[str, str]]] = None,
sql: Optional[str] = None,
start_time: Optional[str] = None,
status: Optional[str] = None,
tags: Optional[Mapping[str, str]] = None,
timeouts: Optional[DliSqlJobTimeoutsArgs] = None) -> DliSqlJob
func GetDliSqlJob(ctx *Context, name string, id IDInput, state *DliSqlJobState, opts ...ResourceOption) (*DliSqlJob, error)
public static DliSqlJob Get(string name, Input<string> id, DliSqlJobState? state, CustomResourceOptions? opts = null)
public static DliSqlJob get(String name, Output<String> id, DliSqlJobState state, CustomResourceOptions options)
resources:
  _:
    type: flexibleengine:DliSqlJob
    get:
      id: ${id}
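For example, a minimal TypeScript sketch of the lookup (the resource name and ID below are placeholders; the ID must be the UUID of an existing job):
import * as flexibleengine from "@pulumi/flexibleengine";

// Look up an already-provisioned DLI SQL job by its provider-assigned ID.
const existing = flexibleengine.DliSqlJob.get("existing-sql-job",
    "7f803d70-c533-469f-8431-e378f3e97123");

// The looked-up resource exposes the same output properties.
export const existingStatus = existing.status;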
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- resource_name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- Conf DliSqlJobConf
- Specifies the configuration parameters for the SQL job. Changing this parameter will create a new resource. The conf object structure is documented below.
- DatabaseName string
- Specifies the database where the SQL is executed. This argument does not need to be configured during database creation. Changing this parameter will create a new resource.
- DliSqlJobId string
- Indicates a resource ID in UUID format.
- Duration double
- Job running duration (unit: millisecond).
- JobType string
- The type of the job, which can be DDL, DCL, IMPORT, EXPORT, QUERY, or INSERT.
- Owner string
- User who submits a job.
- QueueName string
- Specifies the queue to which the job is submitted. Changing this parameter will create a new resource.
- Region string
- Specifies the region in which to create the DLI SQL job resource. If omitted, the provider-level region will be used. Changing this parameter will create a new resource.
- Rows List<ImmutableArray<string>>
- When the statement type is DDL, results of the DDL are displayed.
- Schemas List<ImmutableDictionary<string, string>>
- When the statement type is DDL, the column name and type of DDL are displayed.
- Sql string
- Specifies the SQL statement to be executed. Changing this parameter will create a new resource.
- StartTime string
- Time when a job is started, in RFC 3339 format, e.g. 2019-10-12T07:20:50.52Z.
- Status string
- The status of the job, which can be RUNNING, SCALING, LAUNCHING, FINISHED, FAILED, or CANCELED.
- Tags Dictionary<string, string>
- Specifies the labels of the job. Changing this parameter will create a new resource.
- Timeouts DliSqlJobTimeouts
- Conf DliSqlJobConfArgs
- Specifies the configuration parameters for the SQL job. Changing this parameter will create a new resource. The conf object structure is documented below.
- DatabaseName string
- Specifies the database where the SQL is executed. This argument does not need to be configured during database creation. Changing this parameter will create a new resource.
- DliSqlJobId string
- Indicates a resource ID in UUID format.
- Duration float64
- Job running duration (unit: millisecond).
- JobType string
- The type of the job, which can be DDL, DCL, IMPORT, EXPORT, QUERY, or INSERT.
- Owner string
- User who submits a job.
- QueueName string
- Specifies the queue to which the job is submitted. Changing this parameter will create a new resource.
- Region string
- Specifies the region in which to create the DLI SQL job resource. If omitted, the provider-level region will be used. Changing this parameter will create a new resource.
- Rows [][]string
- When the statement type is DDL, results of the DDL are displayed.
- Schemas []map[string]string
- When the statement type is DDL, the column name and type of DDL are displayed.
- Sql string
- Specifies the SQL statement to be executed. Changing this parameter will create a new resource.
- StartTime string
- Time when a job is started, in RFC 3339 format, e.g. 2019-10-12T07:20:50.52Z.
- Status string
- The status of the job, which can be RUNNING, SCALING, LAUNCHING, FINISHED, FAILED, or CANCELED.
- Tags map[string]string
- Specifies the labels of the job. Changing this parameter will create a new resource.
- Timeouts DliSqlJobTimeoutsArgs
- conf DliSqlJobConf
- Specifies the configuration parameters for the SQL job. Changing this parameter will create a new resource. The conf object structure is documented below.
- databaseName String
- Specifies the database where the SQL is executed. This argument does not need to be configured during database creation. Changing this parameter will create a new resource.
- dliSqlJobId String
- Indicates a resource ID in UUID format.
- duration Double
- Job running duration (unit: millisecond).
- jobType String
- The type of the job, which can be DDL, DCL, IMPORT, EXPORT, QUERY, or INSERT.
- owner String
- User who submits a job.
- queueName String
- Specifies the queue to which the job is submitted. Changing this parameter will create a new resource.
- region String
- Specifies the region in which to create the DLI SQL job resource. If omitted, the provider-level region will be used. Changing this parameter will create a new resource.
- rows List<List<String>>
- When the statement type is DDL, results of the DDL are displayed.
- schemas List<Map<String,String>>
- When the statement type is DDL, the column name and type of DDL are displayed.
- sql String
- Specifies the SQL statement to be executed. Changing this parameter will create a new resource.
- startTime String
- Time when a job is started, in RFC 3339 format, e.g. 2019-10-12T07:20:50.52Z.
- status String
- The status of the job, which can be RUNNING, SCALING, LAUNCHING, FINISHED, FAILED, or CANCELED.
- tags Map<String,String>
- Specifies the labels of the job. Changing this parameter will create a new resource.
- timeouts DliSqlJobTimeouts
- conf DliSqlJobConf
- Specifies the configuration parameters for the SQL job. Changing this parameter will create a new resource. The conf object structure is documented below.
- databaseName string
- Specifies the database where the SQL is executed. This argument does not need to be configured during database creation. Changing this parameter will create a new resource.
- dliSqlJobId string
- Indicates a resource ID in UUID format.
- duration number
- Job running duration (unit: millisecond).
- jobType string
- The type of the job, which can be DDL, DCL, IMPORT, EXPORT, QUERY, or INSERT.
- owner string
- User who submits a job.
- queueName string
- Specifies the queue to which the job is submitted. Changing this parameter will create a new resource.
- region string
- Specifies the region in which to create the DLI SQL job resource. If omitted, the provider-level region will be used. Changing this parameter will create a new resource.
- rows string[][]
- When the statement type is DDL, results of the DDL are displayed.
- schemas {[key: string]: string}[]
- When the statement type is DDL, the column name and type of DDL are displayed.
- sql string
- Specifies the SQL statement to be executed. Changing this parameter will create a new resource.
- startTime string
- Time when a job is started, in RFC 3339 format, e.g. 2019-10-12T07:20:50.52Z.
- status string
- The status of the job, which can be RUNNING, SCALING, LAUNCHING, FINISHED, FAILED, or CANCELED.
- tags {[key: string]: string}
- Specifies the labels of the job. Changing this parameter will create a new resource.
- timeouts DliSqlJobTimeouts
- conf DliSqlJobConfArgs
- Specifies the configuration parameters for the SQL job. Changing this parameter will create a new resource. The conf object structure is documented below.
- database_name str
- Specifies the database where the SQL is executed. This argument does not need to be configured during database creation. Changing this parameter will create a new resource.
- dli_sql_job_id str
- Indicates a resource ID in UUID format.
- duration float
- Job running duration (unit: millisecond).
- job_type str
- The type of the job, which can be DDL, DCL, IMPORT, EXPORT, QUERY, or INSERT.
- owner str
- User who submits a job.
- queue_name str
- Specifies the queue to which the job is submitted. Changing this parameter will create a new resource.
- region str
- Specifies the region in which to create the DLI SQL job resource. If omitted, the provider-level region will be used. Changing this parameter will create a new resource.
- rows Sequence[Sequence[str]]
- When the statement type is DDL, results of the DDL are displayed.
- schemas Sequence[Mapping[str, str]]
- When the statement type is DDL, the column name and type of DDL are displayed.
- sql str
- Specifies the SQL statement to be executed. Changing this parameter will create a new resource.
- start_time str
- Time when a job is started, in RFC 3339 format, e.g. 2019-10-12T07:20:50.52Z.
- status str
- The status of the job, which can be RUNNING, SCALING, LAUNCHING, FINISHED, FAILED, or CANCELED.
- tags Mapping[str, str]
- Specifies the labels of the job. Changing this parameter will create a new resource.
- timeouts DliSqlJobTimeoutsArgs
- conf Property Map
- Specifies the configuration parameters for the SQL job. Changing this parameter will create a new resource. The conf object structure is documented below.
- databaseName String
- Specifies the database where the SQL is executed. This argument does not need to be configured during database creation. Changing this parameter will create a new resource.
- dliSqlJobId String
- Indicates a resource ID in UUID format.
- duration Number
- Job running duration (unit: millisecond).
- jobType String
- The type of the job, which can be DDL, DCL, IMPORT, EXPORT, QUERY, or INSERT.
- owner String
- User who submits a job.
- queueName String
- Specifies the queue to which the job is submitted. Changing this parameter will create a new resource.
- region String
- Specifies the region in which to create the DLI SQL job resource. If omitted, the provider-level region will be used. Changing this parameter will create a new resource.
- rows List<List<String>>
- When the statement type is DDL, results of the DDL are displayed.
- schemas List<Map<String>>
- When the statement type is DDL, the column name and type of DDL are displayed.
- sql String
- Specifies the SQL statement to be executed. Changing this parameter will create a new resource.
- startTime String
- Time when a job is started, in RFC 3339 format, e.g. 2019-10-12T07:20:50.52Z.
- status String
- The status of the job, which can be RUNNING, SCALING, LAUNCHING, FINISHED, FAILED, or CANCELED.
- tags Map<String>
- Specifies the labels of the job. Changing this parameter will create a new resource.
- timeouts Property Map
Supporting Types
DliSqlJobConf, DliSqlJobConfArgs
- DliSqlJobTimeout double
- Sets the job running timeout interval. If the timeout interval expires, the job is canceled. Unit: ms. Changing this parameter will create a new resource.
- DliSqlSqlasyncEnabled bool
- Specifies whether DDL and DCL statements are executed asynchronously. The value true indicates that asynchronous execution is enabled. Default value is false. Changing this parameter will create a new resource.
- SparkSqlAutoBroadcastJoinThreshold double
- Maximum size of the table that displays all working nodes when a connection is executed. You can set this parameter to -1 to disable the display. Default value is 209715200. Currently, only the configuration unit metastore table that runs the ANALYZE TABLE COMPUTE statistics no-scan command and the file-based data source table that directly calculates statistics based on data files are supported. Changing this parameter will create a new resource.
- SparkSqlBadRecordsPath string
- Path of bad records. Changing this parameter will create a new resource.
- SparkSqlDynamicPartitionOverwriteEnabled bool
- In dynamic mode, Spark does not delete the previous partitions and only overwrites the partitions without data during execution. Default value is false. Changing this parameter will create a new resource.
- SparkSqlFilesMaxPartitionBytes double
- Maximum number of bytes to be packed into a single partition when a file is read. Default value is 134217728. Changing this parameter will create a new resource.
- SparkSqlMaxRecordsPerFile double
- Maximum number of records to be written into a single file. If the value is zero or negative, there is no limit. Default value is 0. Changing this parameter will create a new resource.
- SparkSqlShufflePartitions double
- Default number of partitions used to filter data for join or aggregation. Default value is 4096. Changing this parameter will create a new resource.
- DliSqlJobTimeout float64
- Sets the job running timeout interval. If the timeout interval expires, the job is canceled. Unit: ms. Changing this parameter will create a new resource.
- DliSqlSqlasyncEnabled bool
- Specifies whether DDL and DCL statements are executed asynchronously. The value true indicates that asynchronous execution is enabled. Default value is false. Changing this parameter will create a new resource.
- SparkSqlAutoBroadcastJoinThreshold float64
- Maximum size of the table that displays all working nodes when a connection is executed. You can set this parameter to -1 to disable the display. Default value is 209715200. Currently, only the configuration unit metastore table that runs the ANALYZE TABLE COMPUTE statistics no-scan command and the file-based data source table that directly calculates statistics based on data files are supported. Changing this parameter will create a new resource.
- SparkSqlBadRecordsPath string
- Path of bad records. Changing this parameter will create a new resource.
- SparkSqlDynamicPartitionOverwriteEnabled bool
- In dynamic mode, Spark does not delete the previous partitions and only overwrites the partitions without data during execution. Default value is false. Changing this parameter will create a new resource.
- SparkSqlFilesMaxPartitionBytes float64
- Maximum number of bytes to be packed into a single partition when a file is read. Default value is 134217728. Changing this parameter will create a new resource.
- SparkSqlMaxRecordsPerFile float64
- Maximum number of records to be written into a single file. If the value is zero or negative, there is no limit. Default value is 0. Changing this parameter will create a new resource.
- SparkSqlShufflePartitions float64
- Default number of partitions used to filter data for join or aggregation. Default value is 4096. Changing this parameter will create a new resource.
- dliSqlJobTimeout Double
- Sets the job running timeout interval. If the timeout interval expires, the job is canceled. Unit: ms. Changing this parameter will create a new resource.
- dliSqlSqlasyncEnabled Boolean
- Specifies whether DDL and DCL statements are executed asynchronously. The value true indicates that asynchronous execution is enabled. Default value is false. Changing this parameter will create a new resource.
- sparkSqlAutoBroadcastJoinThreshold Double
- Maximum size of the table that displays all working nodes when a connection is executed. You can set this parameter to -1 to disable the display. Default value is 209715200. Currently, only the configuration unit metastore table that runs the ANALYZE TABLE COMPUTE statistics no-scan command and the file-based data source table that directly calculates statistics based on data files are supported. Changing this parameter will create a new resource.
- sparkSqlBadRecordsPath String
- Path of bad records. Changing this parameter will create a new resource.
- sparkSqlDynamicPartitionOverwriteEnabled Boolean
- In dynamic mode, Spark does not delete the previous partitions and only overwrites the partitions without data during execution. Default value is false. Changing this parameter will create a new resource.
- sparkSqlFilesMaxPartitionBytes Double
- Maximum number of bytes to be packed into a single partition when a file is read. Default value is 134217728. Changing this parameter will create a new resource.
- sparkSqlMaxRecordsPerFile Double
- Maximum number of records to be written into a single file. If the value is zero or negative, there is no limit. Default value is 0. Changing this parameter will create a new resource.
- sparkSqlShufflePartitions Double
- Default number of partitions used to filter data for join or aggregation. Default value is 4096. Changing this parameter will create a new resource.
- dliSqlJobTimeout number
- Sets the job running timeout interval. If the timeout interval expires, the job is canceled. Unit: ms. Changing this parameter will create a new resource.
- dliSqlSqlasyncEnabled boolean
- Specifies whether DDL and DCL statements are executed asynchronously. The value true indicates that asynchronous execution is enabled. Default value is false. Changing this parameter will create a new resource.
- sparkSqlAutoBroadcastJoinThreshold number
- Maximum size of the table that displays all working nodes when a connection is executed. You can set this parameter to -1 to disable the display. Default value is 209715200. Currently, only the configuration unit metastore table that runs the ANALYZE TABLE COMPUTE statistics no-scan command and the file-based data source table that directly calculates statistics based on data files are supported. Changing this parameter will create a new resource.
- sparkSqlBadRecordsPath string
- Path of bad records. Changing this parameter will create a new resource.
- sparkSqlDynamicPartitionOverwriteEnabled boolean
- In dynamic mode, Spark does not delete the previous partitions and only overwrites the partitions without data during execution. Default value is false. Changing this parameter will create a new resource.
- sparkSqlFilesMaxPartitionBytes number
- Maximum number of bytes to be packed into a single partition when a file is read. Default value is 134217728. Changing this parameter will create a new resource.
- sparkSqlMaxRecordsPerFile number
- Maximum number of records to be written into a single file. If the value is zero or negative, there is no limit. Default value is 0. Changing this parameter will create a new resource.
- sparkSqlShufflePartitions number
- Default number of partitions used to filter data for join or aggregation. Default value is 4096. Changing this parameter will create a new resource.
- dli_sql_job_timeout float
- Sets the job running timeout interval. If the timeout interval expires, the job is canceled. Unit: ms. Changing this parameter will create a new resource.
- dli_sql_sqlasync_enabled bool
- Specifies whether DDL and DCL statements are executed asynchronously. The value true indicates that asynchronous execution is enabled. Default value is false. Changing this parameter will create a new resource.
- spark_sql_auto_broadcast_join_threshold float
- Maximum size of the table that displays all working nodes when a connection is executed. You can set this parameter to -1 to disable the display. Default value is 209715200. Currently, only the configuration unit metastore table that runs the ANALYZE TABLE COMPUTE statistics no-scan command and the file-based data source table that directly calculates statistics based on data files are supported. Changing this parameter will create a new resource.
- spark_sql_bad_records_path str
- Path of bad records. Changing this parameter will create a new resource.
- spark_sql_dynamic_partition_overwrite_enabled bool
- In dynamic mode, Spark does not delete the previous partitions and only overwrites the partitions without data during execution. Default value is false. Changing this parameter will create a new resource.
- spark_sql_files_max_partition_bytes float
- Maximum number of bytes to be packed into a single partition when a file is read. Default value is 134217728. Changing this parameter will create a new resource.
- spark_sql_max_records_per_file float
- Maximum number of records to be written into a single file. If the value is zero or negative, there is no limit. Default value is 0. Changing this parameter will create a new resource.
- spark_sql_shuffle_partitions float
- Default number of partitions used to filter data for join or aggregation. Default value is 4096. Changing this parameter will create a new resource.
- dliSqlJobTimeout Number
- Sets the job running timeout interval. If the timeout interval expires, the job is canceled. Unit: ms. Changing this parameter will create a new resource.
- dliSqlSqlasyncEnabled Boolean
- Specifies whether DDL and DCL statements are executed asynchronously. The value true indicates that asynchronous execution is enabled. Default value is false. Changing this parameter will create a new resource.
- sparkSqlAutoBroadcastJoinThreshold Number
- Maximum size of the table that displays all working nodes when a connection is executed. You can set this parameter to -1 to disable the display. Default value is 209715200. Currently, only the configuration unit metastore table that runs the ANALYZE TABLE COMPUTE statistics no-scan command and the file-based data source table that directly calculates statistics based on data files are supported. Changing this parameter will create a new resource.
- sparkSqlBadRecordsPath String
- Path of bad records. Changing this parameter will create a new resource.
- sparkSqlDynamicPartitionOverwriteEnabled Boolean
- In dynamic mode, Spark does not delete the previous partitions and only overwrites the partitions without data during execution. Default value is false. Changing this parameter will create a new resource.
- sparkSqlFilesMaxPartitionBytes Number
- Maximum number of bytes to be packed into a single partition when a file is read. Default value is 134217728. Changing this parameter will create a new resource.
- sparkSqlMaxRecordsPerFile Number
- Maximum number of records to be written into a single file. If the value is zero or negative, there is no limit. Default value is 0. Changing this parameter will create a new resource.
- sparkSqlShufflePartitions Number
- Default number of partitions used to filter data for join or aggregation. Default value is 4096. Changing this parameter will create a new resource.
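To tie these options together, here is a hedged TypeScript sketch that sets a few of the conf parameters described above; the database, queue, SQL text, and tuning values are illustrative assumptions, not recommendations:
import * as flexibleengine from "@pulumi/flexibleengine";

// Placeholder job with a conf block; the values only show the supported fields.
const tunedJob = new flexibleengine.DliSqlJob("tuned", {
    sql: "SELECT COUNT(*) FROM example_table",
    databaseName: "example_db",
    queueName: "example_queue",
    conf: {
        dliSqlJobTimeout: 600000,         // cancel the job after 10 minutes (unit: ms)
        dliSqlSqlasyncEnabled: true,      // run DDL/DCL statements asynchronously
        sparkSqlShufflePartitions: 2048,  // lower than the default of 4096
    },
});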
DliSqlJobTimeouts, DliSqlJobTimeoutsArgs
- Create string
- Delete string
Import
The DLI SQL job can be imported by id, e.g.
$ pulumi import flexibleengine:index/dliSqlJob:DliSqlJob example 7f803d70-c533-469f-8431-e378f3e97123
Note that the imported state may not be identical to your resource definition, because some attributes are missing from the API response, for security or other reasons. The missing attributes include: conf, rows and schema.
It is generally recommended to run pulumi preview after importing a resource. You can then decide whether changes should be applied to the resource, or whether the resource definition should be updated to align with the remote resource. You can also ignore changes as shown below.
hcl
resource "flexibleengine_dli_sql_job" "test" {
...
lifecycle {
ignore_changes = [
conf, rows, schema
]
}
}
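In a Pulumi program, the equivalent of the lifecycle block above is the ignoreChanges resource option. Here is a minimal TypeScript sketch, assuming the Pulumi-side property names conf, rows, and schemas for the attributes mentioned above; the job definition itself uses placeholder values:
import * as flexibleengine from "@pulumi/flexibleengine";

// Placeholder definition for a job that was previously imported.
// ignoreChanges suppresses diffs on attributes the API does not return.
const imported = new flexibleengine.DliSqlJob("example", {
    sql: "SELECT 1",
    databaseName: "example_db",
    queueName: "example_queue",
}, {
    ignoreChanges: ["conf", "rows", "schemas"],
});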
To learn more about importing existing cloud resources, see Importing resources.
Package Details
- Repository
- flexibleengine flexibleenginecloud/terraform-provider-flexibleengine
- License
- Notes
- This Pulumi package is based on the flexibleengine Terraform Provider.