flexibleengine.DliSparkJob
Explore with Pulumi AI
Manages spark job resource of DLI within FlexibleEngine
Example Usage
Submit a new spark job with jar packages
import * as pulumi from "@pulumi/pulumi";
import * as flexibleengine from "@pulumi/flexibleengine";
const _default = new flexibleengine.DliSparkJob("default", {
queueName: _var.queue_name,
appName: "driver_package/driver_behavior.jar",
mainClass: "driver_behavior",
specification: "B",
maxRetries: 20,
});
import pulumi
import pulumi_flexibleengine as flexibleengine
default = flexibleengine.DliSparkJob("default",
queue_name=var["queue_name"],
app_name="driver_package/driver_behavior.jar",
main_class="driver_behavior",
specification="B",
max_retries=20)
package main
import (
"github.com/pulumi/pulumi-terraform-provider/sdks/go/flexibleengine/flexibleengine"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
_, err := flexibleengine.NewDliSparkJob(ctx, "default", &flexibleengine.DliSparkJobArgs{
QueueName: pulumi.Any(_var.Queue_name),
AppName: pulumi.String("driver_package/driver_behavior.jar"),
MainClass: pulumi.String("driver_behavior"),
Specification: pulumi.String("B"),
MaxRetries: pulumi.Float64(20),
})
if err != nil {
return err
}
return nil
})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Flexibleengine = Pulumi.Flexibleengine;
return await Deployment.RunAsync(() =>
{
var @default = new Flexibleengine.DliSparkJob("default", new()
{
QueueName = @var.Queue_name,
AppName = "driver_package/driver_behavior.jar",
MainClass = "driver_behavior",
Specification = "B",
MaxRetries = 20,
});
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.flexibleengine.DliSparkJob;
import com.pulumi.flexibleengine.DliSparkJobArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var default_ = new DliSparkJob("default", DliSparkJobArgs.builder()
.queueName(var_.queue_name())
.appName("driver_package/driver_behavior.jar")
.mainClass("driver_behavior")
.specification("B")
.maxRetries(20)
.build());
}
}
resources:
default:
type: flexibleengine:DliSparkJob
properties:
queueName: ${var.queue_name}
appName: driver_package/driver_behavior.jar
mainClass: driver_behavior
specification: B
maxRetries: 20
Create DliSparkJob Resource
Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.
Constructor syntax
new DliSparkJob(name: string, args: DliSparkJobArgs, opts?: CustomResourceOptions);
@overload
def DliSparkJob(resource_name: str,
args: DliSparkJobArgs,
opts: Optional[ResourceOptions] = None)
@overload
def DliSparkJob(resource_name: str,
opts: Optional[ResourceOptions] = None,
app_name: Optional[str] = None,
queue_name: Optional[str] = None,
files: Optional[Sequence[str]] = None,
jars: Optional[Sequence[str]] = None,
dli_spark_job_id: Optional[str] = None,
driver_cores: Optional[float] = None,
driver_memory: Optional[str] = None,
executor_cores: Optional[float] = None,
executor_memory: Optional[str] = None,
executors: Optional[float] = None,
configurations: Optional[Mapping[str, str]] = None,
dependent_packages: Optional[Sequence[DliSparkJobDependentPackageArgs]] = None,
main_class: Optional[str] = None,
max_retries: Optional[float] = None,
modules: Optional[Sequence[str]] = None,
name: Optional[str] = None,
python_files: Optional[Sequence[str]] = None,
app_parameters: Optional[str] = None,
region: Optional[str] = None,
specification: Optional[str] = None,
timeouts: Optional[DliSparkJobTimeoutsArgs] = None)
func NewDliSparkJob(ctx *Context, name string, args DliSparkJobArgs, opts ...ResourceOption) (*DliSparkJob, error)
public DliSparkJob(string name, DliSparkJobArgs args, CustomResourceOptions? opts = null)
public DliSparkJob(String name, DliSparkJobArgs args)
public DliSparkJob(String name, DliSparkJobArgs args, CustomResourceOptions options)
type: flexibleengine:DliSparkJob
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
Parameters
- name string
- The unique name of the resource.
- args DliSparkJobArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- resource_name str
- The unique name of the resource.
- args DliSparkJobArgs
- The arguments to resource properties.
- opts ResourceOptions
- Bag of options to control resource's behavior.
- ctx Context
- Context object for the current deployment.
- name string
- The unique name of the resource.
- args DliSparkJobArgs
- The arguments to resource properties.
- opts ResourceOption
- Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args DliSparkJobArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- name String
- The unique name of the resource.
- args DliSparkJobArgs
- The arguments to resource properties.
- options CustomResourceOptions
- Bag of options to control resource's behavior.
Constructor example
The following reference example uses placeholder values for all input properties.
var dliSparkJobResource = new Flexibleengine.DliSparkJob("dliSparkJobResource", new()
{
AppName = "string",
QueueName = "string",
Files = new[]
{
"string",
},
Jars = new[]
{
"string",
},
DliSparkJobId = "string",
DriverCores = 0,
DriverMemory = "string",
ExecutorCores = 0,
ExecutorMemory = "string",
Executors = 0,
Configurations =
{
{ "string", "string" },
},
DependentPackages = new[]
{
new Flexibleengine.Inputs.DliSparkJobDependentPackageArgs
{
GroupName = "string",
Packages = new[]
{
new Flexibleengine.Inputs.DliSparkJobDependentPackagePackageArgs
{
PackageName = "string",
Type = "string",
},
},
},
},
MainClass = "string",
MaxRetries = 0,
Modules = new[]
{
"string",
},
Name = "string",
PythonFiles = new[]
{
"string",
},
AppParameters = "string",
Region = "string",
Specification = "string",
Timeouts = new Flexibleengine.Inputs.DliSparkJobTimeoutsArgs
{
Delete = "string",
},
});
example, err := flexibleengine.NewDliSparkJob(ctx, "dliSparkJobResource", &flexibleengine.DliSparkJobArgs{
AppName: pulumi.String("string"),
QueueName: pulumi.String("string"),
Files: pulumi.StringArray{
pulumi.String("string"),
},
Jars: pulumi.StringArray{
pulumi.String("string"),
},
DliSparkJobId: pulumi.String("string"),
DriverCores: pulumi.Float64(0),
DriverMemory: pulumi.String("string"),
ExecutorCores: pulumi.Float64(0),
ExecutorMemory: pulumi.String("string"),
Executors: pulumi.Float64(0),
Configurations: pulumi.StringMap{
"string": pulumi.String("string"),
},
DependentPackages: flexibleengine.DliSparkJobDependentPackageArray{
&flexibleengine.DliSparkJobDependentPackageArgs{
GroupName: pulumi.String("string"),
Packages: flexibleengine.DliSparkJobDependentPackagePackageArray{
&flexibleengine.DliSparkJobDependentPackagePackageArgs{
PackageName: pulumi.String("string"),
Type: pulumi.String("string"),
},
},
},
},
MainClass: pulumi.String("string"),
MaxRetries: pulumi.Float64(0),
Modules: pulumi.StringArray{
pulumi.String("string"),
},
Name: pulumi.String("string"),
PythonFiles: pulumi.StringArray{
pulumi.String("string"),
},
AppParameters: pulumi.String("string"),
Region: pulumi.String("string"),
Specification: pulumi.String("string"),
Timeouts: &flexibleengine.DliSparkJobTimeoutsArgs{
Delete: pulumi.String("string"),
},
})
var dliSparkJobResource = new DliSparkJob("dliSparkJobResource", DliSparkJobArgs.builder()
.appName("string")
.queueName("string")
.files("string")
.jars("string")
.dliSparkJobId("string")
.driverCores(0)
.driverMemory("string")
.executorCores(0)
.executorMemory("string")
.executors(0)
.configurations(Map.of("string", "string"))
.dependentPackages(DliSparkJobDependentPackageArgs.builder()
.groupName("string")
.packages(DliSparkJobDependentPackagePackageArgs.builder()
.packageName("string")
.type("string")
.build())
.build())
.mainClass("string")
.maxRetries(0)
.modules("string")
.name("string")
.pythonFiles("string")
.appParameters("string")
.region("string")
.specification("string")
.timeouts(DliSparkJobTimeoutsArgs.builder()
.delete("string")
.build())
.build());
dli_spark_job_resource = flexibleengine.DliSparkJob("dliSparkJobResource",
app_name="string",
queue_name="string",
files=["string"],
jars=["string"],
dli_spark_job_id="string",
driver_cores=0,
driver_memory="string",
executor_cores=0,
executor_memory="string",
executors=0,
configurations={
"string": "string",
},
dependent_packages=[{
"group_name": "string",
"packages": [{
"package_name": "string",
"type": "string",
}],
}],
main_class="string",
max_retries=0,
modules=["string"],
name="string",
python_files=["string"],
app_parameters="string",
region="string",
specification="string",
timeouts={
"delete": "string",
})
const dliSparkJobResource = new flexibleengine.DliSparkJob("dliSparkJobResource", {
appName: "string",
queueName: "string",
files: ["string"],
jars: ["string"],
dliSparkJobId: "string",
driverCores: 0,
driverMemory: "string",
executorCores: 0,
executorMemory: "string",
executors: 0,
configurations: {
string: "string",
},
dependentPackages: [{
groupName: "string",
packages: [{
packageName: "string",
type: "string",
}],
}],
mainClass: "string",
maxRetries: 0,
modules: ["string"],
name: "string",
pythonFiles: ["string"],
appParameters: "string",
region: "string",
specification: "string",
timeouts: {
"delete": "string",
},
});
type: flexibleengine:DliSparkJob
properties:
appName: string
appParameters: string
configurations:
string: string
dependentPackages:
- groupName: string
packages:
- packageName: string
type: string
dliSparkJobId: string
driverCores: 0
driverMemory: string
executorCores: 0
executorMemory: string
executors: 0
files:
- string
jars:
- string
mainClass: string
maxRetries: 0
modules:
- string
name: string
pythonFiles:
- string
queueName: string
region: string
specification: string
timeouts:
delete: string
DliSparkJob Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
The DliSparkJob resource accepts the following input properties:
- App
Name string - Specifies the name of the package that is of the JAR or python file type and
has been uploaded to the DLI resource management system.
The OBS paths are allowed, for example,
obs://<bucket name>/<package name>
. Changing this parameter will submit a new spark job. - Queue
Name string - Specifies the DLI queue name. Changing this parameter will submit a new spark job.
- App
Parameters string - Specifies the input parameters of the main class. Changing this parameter will submit a new spark job.
- Configurations Dictionary<string, string>
- Specifies the configuration items of the DLI spark.
Please follow the document of Spark configurations for
this argument. If you want to enable the
access metadata
of DLI spark in Flexibleengine, please set spark.dli.metaAccess.enable
to true
. Changing this parameter will submit a new spark job. - Dependent
Packages List<DliSpark Job Dependent Package> - Specifies a list of package resource objects. The object structure is documented below. Changing this parameter will submit a new spark job.
- Dli
Spark stringJob Id - ID of the spark job.
- Driver
Cores double - Specifies the number of CPU cores of the Spark application driver.
The default value of this value corresponds to the configuration of the selected
specification
. If you set this value instead of the default value,specification
will be invalid. Changing this parameter will submit a new spark job. - Driver
Memory string - Specifies the driver memory of the spark application.
The default value of this value corresponds to the configuration of the selected
specification
. If you set this value instead of the default value,specification
will be invalid. Changing this parameter will submit a new spark job. - Executor
Cores double - Specifies the number of CPU cores of each executor in the Spark
application. The default value of this value corresponds to the configuration of the selected
specification
. If you set this value instead of the default value,specification
will be invalid. Changing this parameter will submit a new spark job. - Executor
Memory string Specifies the executor memory of the spark application. The default value of this value corresponds to the configuration of the selected
specification
. If you set this value instead of the default value,specification
will be invalid. Changing this parameter will submit a new spark job.->NOTE: The unit must be provided, such as GB or MB.
- Executors double
- Specifies the number of executors in a spark application.
The default value of this value corresponds to the configuration of the selected
specification
. If you set this value instead of the default value,specification
will be invalid. Changing this parameter will submit a new spark job. - Files List<string>
- Specifies a list of the other dependencies name which has been uploaded to the
DLI resource management system. The OBS paths are allowed, for example,
obs://<bucket name>/<dependent files>
. Changing this parameter will submit a new spark job. - Jars List<string>
- Specifies a list of the jar package name which has been uploaded to the DLI
resource management system. The OBS paths are allowed, for example,
obs://<bucket name>/<package name>
. Changing this parameter will submit a new spark job. - Main
Class string - Specifies the main class of the spark job.
Required if the
app_name
is the JAR type. Changing this parameter will submit a new spark job. - Max
Retries double - Specifies the maximum retry times.
The default value of this value corresponds to the configuration of the selected
specification
. If you set this value instead of the default value,specification
will be invalid. Changing this parameter will submit a new spark job. - Modules List<string>
- Specifies a list of modules that depend on system resources.
The dependent modules and corresponding services are as follows.
Changing this parameter will submit a new spark job.
- sys.datasource.hbase: CloudTable/MRS HBase
- sys.datasource.opentsdb: CloudTable/MRS OpenTSDB
- sys.datasource.rds: RDS MySQL
- sys.datasource.css: CSS
- Name string
- Specifies the spark job name. The value contains a maximum of 128 characters. Changing this parameter will submit a new spark job.
- Python
Files List<string> - Specifies a list of the python file name which has been uploaded to the
DLI resource management system. The OBS paths are allowed, for example,
obs://<bucket name>/<python file name>
. Changing this parameter will submit a new spark job. - Region string
- Specifies the region in which to submit a spark job. If omitted, the provider-level region will be used. Changing this parameter will submit a new spark job.
- Specification string
Specifies the compute resource type for spark application. The available types and related specifications are as follows, default to minimum configuration (type A). Changing this parameter will submit a new spark job.
| type | resource | driver cores | executor cores | driver memory | executor memory | num executor | | ---- | ---- | ---- | ---- | ---- | ---- | ---- | | A | 8 vCPUs, 32-GB memory | 2 | 1 | 7G | 4G | 6 | | B | 16 vCPUs, 64-GB memory | 2 | 2 | 7G | 8G | 7 | | C | 32 vCPUs, 128-GB memory | 4 | 2 | 12G | 8G | 14 |
- Timeouts
Dli
Spark Job Timeouts
- App
Name string - Specifies the name of the package that is of the JAR or python file type and
has been uploaded to the DLI resource management system.
The OBS paths are allowed, for example,
obs://<bucket name>/<package name>
. Changing this parameter will submit a new spark job. - Queue
Name string - Specifies the DLI queue name. Changing this parameter will submit a new spark job.
- App
Parameters string - Specifies the input parameters of the main class. Changing this parameter will submit a new spark job.
- Configurations map[string]string
- Specifies the configuration items of the DLI spark.
Please follow the document of Spark configurations for
this argument. If you want to enable the
access metadata
of DLI spark in Flexibleengine, please set spark.dli.metaAccess.enable
to true
. Changing this parameter will submit a new spark job. - Dependent
Packages []DliSpark Job Dependent Package Args - Specifies a list of package resource objects. The object structure is documented below. Changing this parameter will submit a new spark job.
- Dli
Spark stringJob Id - ID of the spark job.
- Driver
Cores float64 - Specifies the number of CPU cores of the Spark application driver.
The default value of this value corresponds to the configuration of the selected
specification
. If you set this value instead of the default value,specification
will be invalid. Changing this parameter will submit a new spark job. - Driver
Memory string - Specifies the driver memory of the spark application.
The default value of this value corresponds to the configuration of the selected
specification
. If you set this value instead of the default value,specification
will be invalid. Changing this parameter will submit a new spark job. - Executor
Cores float64 - Specifies the number of CPU cores of each executor in the Spark
application. The default value of this value corresponds to the configuration of the selected
specification
. If you set this value instead of the default value,specification
will be invalid. Changing this parameter will submit a new spark job. - Executor
Memory string Specifies the executor memory of the spark application. The default value of this value corresponds to the configuration of the selected
specification
. If you set this value instead of the default value,specification
will be invalid. Changing this parameter will submit a new spark job.->NOTE: The unit must be provided, such as GB or MB.
- Executors float64
- Specifies the number of executors in a spark application.
The default value of this value corresponds to the configuration of the selected
specification
. If you set this value instead of the default value,specification
will be invalid. Changing this parameter will submit a new spark job. - Files []string
- Specifies a list of the other dependencies name which has been uploaded to the
DLI resource management system. The OBS paths are allowed, for example,
obs://<bucket name>/<dependent files>
. Changing this parameter will submit a new spark job. - Jars []string
- Specifies a list of the jar package name which has been uploaded to the DLI
resource management system. The OBS paths are allowed, for example,
obs://<bucket name>/<package name>
. Changing this parameter will submit a new spark job. - Main
Class string - Specifies the main class of the spark job.
Required if the
app_name
is the JAR type. Changing this parameter will submit a new spark job. - Max
Retries float64 - Specifies the maximum retry times.
The default value of this value corresponds to the configuration of the selected
specification
. If you set this value instead of the default value,specification
will be invalid. Changing this parameter will submit a new spark job. - Modules []string
- Specifies a list of modules that depend on system resources.
The dependent modules and corresponding services are as follows.
Changing this parameter will submit a new spark job.
- sys.datasource.hbase: CloudTable/MRS HBase
- sys.datasource.opentsdb: CloudTable/MRS OpenTSDB
- sys.datasource.rds: RDS MySQL
- sys.datasource.css: CSS
- Name string
- Specifies the spark job name. The value contains a maximum of 128 characters. Changing this parameter will submit a new spark job.
- Python
Files []string - Specifies a list of the python file name which has been uploaded to the
DLI resource management system. The OBS paths are allowed, for example,
obs://<bucket name>/<python file name>
. Changing this parameter will submit a new spark job. - Region string
- Specifies the region in which to submit a spark job. If omitted, the provider-level region will be used. Changing this parameter will submit a new spark job.
- Specification string
Specifies the compute resource type for spark application. The available types and related specifications are as follows, default to minimum configuration (type A). Changing this parameter will submit a new spark job.
| type | resource | driver cores | executor cores | driver memory | executor memory | num executor | | ---- | ---- | ---- | ---- | ---- | ---- | ---- | | A | 8 vCPUs, 32-GB memory | 2 | 1 | 7G | 4G | 6 | | B | 16 vCPUs, 64-GB memory | 2 | 2 | 7G | 8G | 7 | | C | 32 vCPUs, 128-GB memory | 4 | 2 | 12G | 8G | 14 |
- Timeouts
Dli
Spark Job Timeouts Args
- app
Name String - Specifies the name of the package that is of the JAR or python file type and
has been uploaded to the DLI resource management system.
The OBS paths are allowed, for example,
obs://<bucket name>/<package name>
. Changing this parameter will submit a new spark job. - queue
Name String - Specifies the DLI queue name. Changing this parameter will submit a new spark job.
- app
Parameters String - Specifies the input parameters of the main class. Changing this parameter will submit a new spark job.
- configurations Map<String,String>
- Specifies the configuration items of the DLI spark.
Please follow the document of Spark configurations for
this argument. If you want to enable the
access metadata
of DLI spark in Flexibleengine, please set spark.dli.metaAccess.enable
to true
. Changing this parameter will submit a new spark job. - dependent
Packages List<DliSpark Job Dependent Package> - Specifies a list of package resource objects. The object structure is documented below. Changing this parameter will submit a new spark job.
- dli
Spark StringJob Id - ID of the spark job.
- driver
Cores Double - Specifies the number of CPU cores of the Spark application driver.
The default value of this value corresponds to the configuration of the selected
specification
. If you set this value instead of the default value,specification
will be invalid. Changing this parameter will submit a new spark job. - driver
Memory String - Specifies the driver memory of the spark application.
The default value of this value corresponds to the configuration of the selected
specification
. If you set this value instead of the default value,specification
will be invalid. Changing this parameter will submit a new spark job. - executor
Cores Double - Specifies the number of CPU cores of each executor in the Spark
application. The default value of this value corresponds to the configuration of the selected
specification
. If you set this value instead of the default value,specification
will be invalid. Changing this parameter will submit a new spark job. - executor
Memory String Specifies the executor memory of the spark application. The default value of this value corresponds to the configuration of the selected
specification
. If you set this value instead of the default value,specification
will be invalid. Changing this parameter will submit a new spark job.->NOTE: The unit must be provided, such as GB or MB.
- executors Double
- Specifies the number of executors in a spark application.
The default value of this value corresponds to the configuration of the selected
specification
. If you set this value instead of the default value,specification
will be invalid. Changing this parameter will submit a new spark job. - files List<String>
- Specifies a list of the other dependencies name which has been uploaded to the
DLI resource management system. The OBS paths are allowed, for example,
obs://<bucket name>/<dependent files>
. Changing this parameter will submit a new spark job. - jars List<String>
- Specifies a list of the jar package name which has been uploaded to the DLI
resource management system. The OBS paths are allowed, for example,
obs://<bucket name>/<package name>
. Changing this parameter will submit a new spark job. - main
Class String - Specifies the main class of the spark job.
Required if the
app_name
is the JAR type. Changing this parameter will submit a new spark job. - max
Retries Double - Specifies the maximum retry times.
The default value of this value corresponds to the configuration of the selected
specification
. If you set this value instead of the default value,specification
will be invalid. Changing this parameter will submit a new spark job. - modules List<String>
- Specifies a list of modules that depend on system resources.
The dependent modules and corresponding services are as follows.
Changing this parameter will submit a new spark job.
- sys.datasource.hbase: CloudTable/MRS HBase
- sys.datasource.opentsdb: CloudTable/MRS OpenTSDB
- sys.datasource.rds: RDS MySQL
- sys.datasource.css: CSS
- name String
- Specifies the spark job name. The value contains a maximum of 128 characters. Changing this parameter will submit a new spark job.
- python
Files List<String> - Specifies a list of the python file name which has been uploaded to the
DLI resource management system. The OBS paths are allowed, for example,
obs://<bucket name>/<python file name>
. Changing this parameter will submit a new spark job. - region String
- Specifies the region in which to submit a spark job. If omitted, the provider-level region will be used. Changing this parameter will submit a new spark job.
- specification String
Specifies the compute resource type for spark application. The available types and related specifications are as follows, default to minimum configuration (type A). Changing this parameter will submit a new spark job.
| type | resource | driver cores | executor cores | driver memory | executor memory | num executor | | ---- | ---- | ---- | ---- | ---- | ---- | ---- | | A | 8 vCPUs, 32-GB memory | 2 | 1 | 7G | 4G | 6 | | B | 16 vCPUs, 64-GB memory | 2 | 2 | 7G | 8G | 7 | | C | 32 vCPUs, 128-GB memory | 4 | 2 | 12G | 8G | 14 |
- timeouts
Dli
Spark Job Timeouts
- app
Name string - Specifies the name of the package that is of the JAR or python file type and
has been uploaded to the DLI resource management system.
The OBS paths are allowed, for example,
obs://<bucket name>/<package name>
. Changing this parameter will submit a new spark job. - queue
Name string - Specifies the DLI queue name. Changing this parameter will submit a new spark job.
- app
Parameters string - Specifies the input parameters of the main class. Changing this parameter will submit a new spark job.
- configurations {[key: string]: string}
- Specifies the configuration items of the DLI spark.
Please follow the document of Spark configurations for
this argument. If you want to enable the
access metadata
of DLI spark in Flexibleengine, please set spark.dli.metaAccess.enable
to true
. Changing this parameter will submit a new spark job. - dependent
Packages DliSpark Job Dependent Package[] - Specifies a list of package resource objects. The object structure is documented below. Changing this parameter will submit a new spark job.
- dli
Spark stringJob Id - ID of the spark job.
- driver
Cores number - Specifies the number of CPU cores of the Spark application driver.
The default value of this value corresponds to the configuration of the selected
specification
. If you set this value instead of the default value,specification
will be invalid. Changing this parameter will submit a new spark job. - driver
Memory string - Specifies the driver memory of the spark application.
The default value of this value corresponds to the configuration of the selected
specification
. If you set this value instead of the default value,specification
will be invalid. Changing this parameter will submit a new spark job. - executor
Cores number - Specifies the number of CPU cores of each executor in the Spark
application. The default value of this value corresponds to the configuration of the selected
specification
. If you set this value instead of the default value,specification
will be invalid. Changing this parameter will submit a new spark job. - executor
Memory string Specifies the executor memory of the spark application. The default value of this value corresponds to the configuration of the selected
specification
. If you set this value instead of the default value,specification
will be invalid. Changing this parameter will submit a new spark job.->NOTE: The unit must be provided, such as GB or MB.
- executors number
- Specifies the number of executors in a spark application.
The default value of this value corresponds to the configuration of the selected
specification
. If you set this value instead of the default value,specification
will be invalid. Changing this parameter will submit a new spark job. - files string[]
- Specifies a list of the other dependencies name which has been uploaded to the
DLI resource management system. The OBS paths are allowed, for example,
obs://<bucket name>/<dependent files>
. Changing this parameter will submit a new spark job. - jars string[]
- Specifies a list of the jar package name which has been uploaded to the DLI
resource management system. The OBS paths are allowed, for example,
obs://<bucket name>/<package name>
. Changing this parameter will submit a new spark job. - main
Class string - Specifies the main class of the spark job.
Required if the
app_name
is the JAR type. Changing this parameter will submit a new spark job. - max
Retries number - Specifies the maximum retry times.
The default value of this value corresponds to the configuration of the selected
specification
. If you set this value instead of the default value,specification
will be invalid. Changing this parameter will submit a new spark job. - modules string[]
- Specifies a list of modules that depend on system resources.
The dependent modules and corresponding services are as follows.
Changing this parameter will submit a new spark job.
- sys.datasource.hbase: CloudTable/MRS HBase
- sys.datasource.opentsdb: CloudTable/MRS OpenTSDB
- sys.datasource.rds: RDS MySQL
- sys.datasource.css: CSS
- name string
- Specifies the spark job name. The value contains a maximum of 128 characters. Changing this parameter will submit a new spark job.
- python
Files string[] - Specifies a list of the python file name which has been uploaded to the
DLI resource management system. The OBS paths are allowed, for example,
obs://<bucket name>/<python file name>
. Changing this parameter will submit a new spark job. - region string
- Specifies the region in which to submit a spark job. If omitted, the provider-level region will be used. Changing this parameter will submit a new spark job.
- specification string
Specifies the compute resource type for spark application. The available types and related specifications are as follows, default to minimum configuration (type A). Changing this parameter will submit a new spark job.
| type | resource | driver cores | executor cores | driver memory | executor memory | num executor | | ---- | ---- | ---- | ---- | ---- | ---- | ---- | | A | 8 vCPUs, 32-GB memory | 2 | 1 | 7G | 4G | 6 | | B | 16 vCPUs, 64-GB memory | 2 | 2 | 7G | 8G | 7 | | C | 32 vCPUs, 128-GB memory | 4 | 2 | 12G | 8G | 14 |
- timeouts
Dli
Spark Job Timeouts
- app_
name str - Specifies the name of the package that is of the JAR or python file type and
has been uploaded to the DLI resource management system.
The OBS paths are allowed, for example,
obs://<bucket name>/<package name>
. Changing this parameter will submit a new spark job. - queue_
name str - Specifies the DLI queue name. Changing this parameter will submit a new spark job.
- app_
parameters str - Specifies the input parameters of the main class. Changing this parameter will submit a new spark job.
- configurations Mapping[str, str]
- Specifies the configuration items of the DLI spark.
Please follow the document of Spark configurations for
this argument. If you want to enable the
access metadata
of DLI spark in Flexibleengine, please set spark.dli.metaAccess.enable
to true
. Changing this parameter will submit a new spark job. - dependent_
packages Sequence[DliSpark Job Dependent Package Args] - Specifies a list of package resource objects. The object structure is documented below. Changing this parameter will submit a new spark job.
- dli_
spark_ strjob_ id - ID of the spark job.
- driver_
cores float - Specifies the number of CPU cores of the Spark application driver.
The default value of this value corresponds to the configuration of the selected
specification
. If you set this value instead of the default value,specification
will be invalid. Changing this parameter will submit a new spark job. - driver_
memory str - Specifies the driver memory of the spark application.
The default value of this value corresponds to the configuration of the selected
specification
. If you set this value instead of the default value,specification
will be invalid. Changing this parameter will submit a new spark job. - executor_
cores float - Specifies the number of CPU cores of each executor in the Spark
application. The default value of this value corresponds to the configuration of the selected
specification
. If you set this value instead of the default value,specification
will be invalid. Changing this parameter will submit a new spark job. - executor_
memory str Specifies the executor memory of the spark application. The default value of this value corresponds to the configuration of the selected
specification
. If you set this value instead of the default value,specification
will be invalid. Changing this parameter will submit a new spark job.->NOTE: The unit must be provided, such as GB or MB.
- executors float
- Specifies the number of executors in a spark application.
The default value of this value corresponds to the configuration of the selected
specification
. If you set this value instead of the default value,specification
will be invalid. Changing this parameter will submit a new spark job. - files Sequence[str]
- Specifies a list of the other dependencies name which has been uploaded to the
DLI resource management system. The OBS paths are allowed, for example,
obs://<bucket name>/<dependent files>
. Changing this parameter will submit a new spark job. - jars Sequence[str]
- Specifies a list of the jar package name which has been uploaded to the DLI
resource management system. The OBS paths are allowed, for example,
obs://<bucket name>/<package name>
. Changing this parameter will submit a new spark job. - main_
class str - Specifies the main class of the spark job.
Required if the
app_name
is the JAR type. Changing this parameter will submit a new spark job. - max_
retries float - Specifies the maximum retry times.
The default value of this value corresponds to the configuration of the selected
specification
. If you set this value instead of the default value, specification
will be invalid. Changing this parameter will submit a new spark job. - modules Sequence[str]
- Specifies a list of modules that depend on system resources.
The dependent modules and corresponding services are as follows.
Changing this parameter will submit a new spark job.
- sys.datasource.hbase: CloudTable/MRS HBase
- sys.datasource.opentsdb: CloudTable/MRS OpenTSDB
- sys.datasource.rds: RDS MySQL
- sys.datasource.css: CSS
- name str
- Specifies the spark job name. The value contains a maximum of 128 characters. Changing this parameter will submit a new spark job.
- python_
files Sequence[str] - Specifies a list of the python file name which has been uploaded to the
DLI resource management system. The OBS paths are allowed, for example,
obs://<bucket name>/<python file name>
. Changing this parameter will submit a new spark job. - region str
- Specifies the region in which to submit a spark job. If omitted, the provider-level region will be used. Changing this parameter will submit a new spark job.
- specification str
Specifies the compute resource type for spark application. The available types and related specifications are as follows, default to minimum configuration (type A). Changing this parameter will submit a new spark job.
| type | resource | driver cores | executor cores | driver memory | executor memory | num executor | | ---- | ---- | ---- | ---- | ---- | ---- | ---- | | A | 8 vCPUs, 32-GB memory | 2 | 1 | 7G | 4G | 6 | | B | 16 vCPUs, 64-GB memory | 2 | 2 | 7G | 8G | 7 | | C | 32 vCPUs, 128-GB memory | 4 | 2 | 12G | 8G | 14 |
- timeouts
Dli
Spark Job Timeouts Args
- app
Name String - Specifies the name of the package that is of the JAR or python file type and
has been uploaded to the DLI resource management system.
The OBS paths are allowed, for example,
obs://<bucket name>/<package name>
. Changing this parameter will submit a new spark job. - queue
Name String - Specifies the DLI queue name. Changing this parameter will submit a new spark job.
- app
Parameters String - Specifies the input parameters of the main class. Changing this parameter will submit a new spark job.
- configurations Map<String>
- Specifies the configuration items of the DLI spark.
Please follow the documentation of Spark configurations for
this argument. If you want to enable the
access metadata
of DLI spark in Flexibleengine, please set spark.dli.metaAccess.enable
to true
. Changing this parameter will submit a new spark job. - dependent
Packages List<Property Map> - Specifies a list of package resource objects. The object structure is documented below. Changing this parameter will submit a new spark job.
- dli
Spark StringJob Id - ID of the spark job.
- driver
Cores Number - Specifies the number of CPU cores of the Spark application driver.
The default value of this value corresponds to the configuration of the selected
specification
. If you set this value instead of the default value, specification
will be invalid. Changing this parameter will submit a new spark job. - driver
Memory String - Specifies the driver memory of the spark application.
The default value of this value corresponds to the configuration of the selected
specification
. If you set this value instead of the default value, specification
will be invalid. Changing this parameter will submit a new spark job. - executor
Cores Number - Specifies the number of CPU cores of each executor in the Spark
application. The default value of this value corresponds to the configuration of the selected
specification
. If you set this value instead of the default value, specification
will be invalid. Changing this parameter will submit a new spark job. - executor
Memory String - Specifies the executor memory of the spark application. The default value of this value corresponds to the configuration of the selected
specification
. If you set this value instead of the default value, specification
will be invalid. Changing this parameter will submit a new spark job. -> NOTE: The unit must be provided, such as GB or MB.
- executors Number
- Specifies the number of executors in a spark application.
The default value of this value corresponds to the configuration of the selected
specification
. If you set this value instead of the default value, specification
will be invalid. Changing this parameter will submit a new spark job. - files List<String>
- Specifies a list of the other dependencies name which has been uploaded to the
DLI resource management system. The OBS paths are allowed, for example,
obs://<bucket name>/<dependent files>
. Changing this parameter will submit a new spark job. - jars List<String>
- Specifies a list of the jar package name which has been uploaded to the DLI
resource management system. The OBS paths are allowed, for example,
obs://<bucket name>/<package name>
. Changing this parameter will submit a new spark job. - main
Class String - Specifies the main class of the spark job.
Required if the
app_name
is the JAR type. Changing this parameter will submit a new spark job. - max
Retries Number - Specifies the maximum retry times.
The default value of this value corresponds to the configuration of the selected
specification
. If you set this value instead of the default value, specification
will be invalid. Changing this parameter will submit a new spark job. - modules List<String>
- Specifies a list of modules that depend on system resources.
The dependent modules and corresponding services are as follows.
Changing this parameter will submit a new spark job.
- sys.datasource.hbase: CloudTable/MRS HBase
- sys.datasource.opentsdb: CloudTable/MRS OpenTSDB
- sys.datasource.rds: RDS MySQL
- sys.datasource.css: CSS
- name String
- Specifies the spark job name. The value contains a maximum of 128 characters. Changing this parameter will submit a new spark job.
- python
Files List<String> - Specifies a list of the python file name which has been uploaded to the
DLI resource management system. The OBS paths are allowed, for example,
obs://<bucket name>/<python file name>
. Changing this parameter will submit a new spark job. - region String
- Specifies the region in which to submit a spark job. If omitted, the provider-level region will be used. Changing this parameter will submit a new spark job.
- specification String
Specifies the compute resource type for spark application. The available types and related specifications are as follows, default to minimum configuration (type A). Changing this parameter will submit a new spark job.
| type | resource | driver cores | executor cores | driver memory | executor memory | num executor | | ---- | ---- | ---- | ---- | ---- | ---- | ---- | | A | 8 vCPUs, 32-GB memory | 2 | 1 | 7G | 4G | 6 | | B | 16 vCPUs, 64-GB memory | 2 | 2 | 7G | 8G | 7 | | C | 32 vCPUs, 128-GB memory | 4 | 2 | 12G | 8G | 14 |
- timeouts Property Map
Outputs
All input properties are implicitly available as output properties. Additionally, the DliSparkJob resource produces the following output properties:
- created_
at str - Time of the DLI spark job submit.
- id str
- The provider-assigned unique ID for this managed resource.
- owner str
- The owner of the spark job.
Look up Existing DliSparkJob Resource
Get an existing DliSparkJob resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: DliSparkJobState, opts?: CustomResourceOptions): DliSparkJob
@staticmethod
def get(resource_name: str,
id: str,
opts: Optional[ResourceOptions] = None,
app_name: Optional[str] = None,
app_parameters: Optional[str] = None,
configurations: Optional[Mapping[str, str]] = None,
created_at: Optional[str] = None,
dependent_packages: Optional[Sequence[DliSparkJobDependentPackageArgs]] = None,
dli_spark_job_id: Optional[str] = None,
driver_cores: Optional[float] = None,
driver_memory: Optional[str] = None,
executor_cores: Optional[float] = None,
executor_memory: Optional[str] = None,
executors: Optional[float] = None,
files: Optional[Sequence[str]] = None,
jars: Optional[Sequence[str]] = None,
main_class: Optional[str] = None,
max_retries: Optional[float] = None,
modules: Optional[Sequence[str]] = None,
name: Optional[str] = None,
owner: Optional[str] = None,
python_files: Optional[Sequence[str]] = None,
queue_name: Optional[str] = None,
region: Optional[str] = None,
specification: Optional[str] = None,
timeouts: Optional[DliSparkJobTimeoutsArgs] = None) -> DliSparkJob
func GetDliSparkJob(ctx *Context, name string, id IDInput, state *DliSparkJobState, opts ...ResourceOption) (*DliSparkJob, error)
public static DliSparkJob Get(string name, Input<string> id, DliSparkJobState? state, CustomResourceOptions? opts = null)
public static DliSparkJob get(String name, Output<String> id, DliSparkJobState state, CustomResourceOptions options)
resources: _: type: flexibleengine:DliSparkJob get: id: ${id}
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- resource_name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- App
Name string - Specifies the name of the package that is of the JAR or python file type and
has been uploaded to the DLI resource management system.
The OBS paths are allowed, for example,
obs://<bucket name>/<package name>
. Changing this parameter will submit a new spark job. - App
Parameters string - Specifies the input parameters of the main class. Changing this parameter will submit a new spark job.
- Configurations Dictionary<string, string>
- Specifies the configuration items of the DLI spark.
Please follow the documentation of Spark configurations for
this argument. If you want to enable the
access metadata
of DLI spark in Flexibleengine, please set spark.dli.metaAccess.enable
to true
. Changing this parameter will submit a new spark job. - Created
At string - Time of the DLI spark job submit.
- Dependent
Packages List<DliSpark Job Dependent Package> - Specifies a list of package resource objects. The object structure is documented below. Changing this parameter will submit a new spark job.
- Dli
Spark stringJob Id - ID of the spark job.
- Driver
Cores double - Specifies the number of CPU cores of the Spark application driver.
The default value of this value corresponds to the configuration of the selected
specification
. If you set this value instead of the default value, specification
will be invalid. Changing this parameter will submit a new spark job. - Driver
Memory string - Specifies the driver memory of the spark application.
The default value of this value corresponds to the configuration of the selected
specification
. If you set this value instead of the default value, specification
will be invalid. Changing this parameter will submit a new spark job. - Executor
Cores double - Specifies the number of CPU cores of each executor in the Spark
application. The default value of this value corresponds to the configuration of the selected
specification
. If you set this value instead of the default value, specification
will be invalid. Changing this parameter will submit a new spark job. - Executor
Memory string - Specifies the executor memory of the spark application. The default value of this value corresponds to the configuration of the selected
specification
. If you set this value instead of the default value, specification
will be invalid. Changing this parameter will submit a new spark job. -> NOTE: The unit must be provided, such as GB or MB.
- Executors double
- Specifies the number of executors in a spark application.
The default value of this value corresponds to the configuration of the selected
specification
. If you set this value instead of the default value, specification
will be invalid. Changing this parameter will submit a new spark job. - Files List<string>
- Specifies a list of the other dependencies name which has been uploaded to the
DLI resource management system. The OBS paths are allowed, for example,
obs://<bucket name>/<dependent files>
. Changing this parameter will submit a new spark job. - Jars List<string>
- Specifies a list of the jar package name which has been uploaded to the DLI
resource management system. The OBS paths are allowed, for example,
obs://<bucket name>/<package name>
. Changing this parameter will submit a new spark job. - Main
Class string - Specifies the main class of the spark job.
Required if the
app_name
is the JAR type. Changing this parameter will submit a new spark job. - Max
Retries double - Specifies the maximum retry times.
The default value of this value corresponds to the configuration of the selected
specification
. If you set this value instead of the default value, specification
will be invalid. Changing this parameter will submit a new spark job. - Modules List<string>
- Specifies a list of modules that depend on system resources.
The dependent modules and corresponding services are as follows.
Changing this parameter will submit a new spark job.
- sys.datasource.hbase: CloudTable/MRS HBase
- sys.datasource.opentsdb: CloudTable/MRS OpenTSDB
- sys.datasource.rds: RDS MySQL
- sys.datasource.css: CSS
- Name string
- Specifies the spark job name. The value contains a maximum of 128 characters. Changing this parameter will submit a new spark job.
- Owner string
- The owner of the spark job.
- Python
Files List<string> - Specifies a list of the python file name which has been uploaded to the
DLI resource management system. The OBS paths are allowed, for example,
obs://<bucket name>/<python file name>
. Changing this parameter will submit a new spark job. - Queue
Name string - Specifies the DLI queue name. Changing this parameter will submit a new spark job.
- Region string
- Specifies the region in which to submit a spark job. If omitted, the provider-level region will be used. Changing this parameter will submit a new spark job.
- Specification string
Specifies the compute resource type for spark application. The available types and related specifications are as follows, default to minimum configuration (type A). Changing this parameter will submit a new spark job.
| type | resource | driver cores | executor cores | driver memory | executor memory | num executor | | ---- | ---- | ---- | ---- | ---- | ---- | ---- | | A | 8 vCPUs, 32-GB memory | 2 | 1 | 7G | 4G | 6 | | B | 16 vCPUs, 64-GB memory | 2 | 2 | 7G | 8G | 7 | | C | 32 vCPUs, 128-GB memory | 4 | 2 | 12G | 8G | 14 |
- Timeouts
Dli
Spark Job Timeouts
- App
Name string - Specifies the name of the package that is of the JAR or python file type and
has been uploaded to the DLI resource management system.
The OBS paths are allowed, for example,
obs://<bucket name>/<package name>
. Changing this parameter will submit a new spark job. - App
Parameters string - Specifies the input parameters of the main class. Changing this parameter will submit a new spark job.
- Configurations map[string]string
- Specifies the configuration items of the DLI spark.
Please follow the documentation of Spark configurations for
this argument. If you want to enable the
access metadata
of DLI spark in Flexibleengine, please set spark.dli.metaAccess.enable
to true
. Changing this parameter will submit a new spark job. - Created
At string - Time of the DLI spark job submit.
- Dependent
Packages []DliSpark Job Dependent Package Args - Specifies a list of package resource objects. The object structure is documented below. Changing this parameter will submit a new spark job.
- Dli
Spark stringJob Id - ID of the spark job.
- Driver
Cores float64 - Specifies the number of CPU cores of the Spark application driver.
The default value of this value corresponds to the configuration of the selected
specification
. If you set this value instead of the default value, specification
will be invalid. Changing this parameter will submit a new spark job. - Driver
Memory string - Specifies the driver memory of the spark application.
The default value of this value corresponds to the configuration of the selected
specification
. If you set this value instead of the default value, specification
will be invalid. Changing this parameter will submit a new spark job. - Executor
Cores float64 - Specifies the number of CPU cores of each executor in the Spark
application. The default value of this value corresponds to the configuration of the selected
specification
. If you set this value instead of the default value, specification
will be invalid. Changing this parameter will submit a new spark job. - Executor
Memory string - Specifies the executor memory of the spark application. The default value of this value corresponds to the configuration of the selected
specification
. If you set this value instead of the default value, specification
will be invalid. Changing this parameter will submit a new spark job. -> NOTE: The unit must be provided, such as GB or MB.
- Executors float64
- Specifies the number of executors in a spark application.
The default value of this value corresponds to the configuration of the selected
specification
. If you set this value instead of the default value, specification
will be invalid. Changing this parameter will submit a new spark job. - Files []string
- Specifies a list of the other dependencies name which has been uploaded to the
DLI resource management system. The OBS paths are allowed, for example,
obs://<bucket name>/<dependent files>
. Changing this parameter will submit a new spark job. - Jars []string
- Specifies a list of the jar package name which has been uploaded to the DLI
resource management system. The OBS paths are allowed, for example,
obs://<bucket name>/<package name>
. Changing this parameter will submit a new spark job. - Main
Class string - Specifies the main class of the spark job.
Required if the
app_name
is the JAR type. Changing this parameter will submit a new spark job. - Max
Retries float64 - Specifies the maximum retry times.
The default value of this value corresponds to the configuration of the selected
specification
. If you set this value instead of the default value, specification
will be invalid. Changing this parameter will submit a new spark job. - Modules []string
- Specifies a list of modules that depend on system resources.
The dependent modules and corresponding services are as follows.
Changing this parameter will submit a new spark job.
- sys.datasource.hbase: CloudTable/MRS HBase
- sys.datasource.opentsdb: CloudTable/MRS OpenTSDB
- sys.datasource.rds: RDS MySQL
- sys.datasource.css: CSS
- Name string
- Specifies the spark job name. The value contains a maximum of 128 characters. Changing this parameter will submit a new spark job.
- Owner string
- The owner of the spark job.
- Python
Files []string - Specifies a list of the python file name which has been uploaded to the
DLI resource management system. The OBS paths are allowed, for example,
obs://<bucket name>/<python file name>
. Changing this parameter will submit a new spark job. - Queue
Name string - Specifies the DLI queue name. Changing this parameter will submit a new spark job.
- Region string
- Specifies the region in which to submit a spark job. If omitted, the provider-level region will be used. Changing this parameter will submit a new spark job.
- Specification string
Specifies the compute resource type for spark application. The available types and related specifications are as follows, default to minimum configuration (type A). Changing this parameter will submit a new spark job.
| type | resource | driver cores | executor cores | driver memory | executor memory | num executor | | ---- | ---- | ---- | ---- | ---- | ---- | ---- | | A | 8 vCPUs, 32-GB memory | 2 | 1 | 7G | 4G | 6 | | B | 16 vCPUs, 64-GB memory | 2 | 2 | 7G | 8G | 7 | | C | 32 vCPUs, 128-GB memory | 4 | 2 | 12G | 8G | 14 |
- Timeouts
Dli
Spark Job Timeouts Args
- app
Name String - Specifies the name of the package that is of the JAR or python file type and
has been uploaded to the DLI resource management system.
The OBS paths are allowed, for example,
obs://<bucket name>/<package name>
. Changing this parameter will submit a new spark job. - app
Parameters String - Specifies the input parameters of the main class. Changing this parameter will submit a new spark job.
- configurations Map<String,String>
- Specifies the configuration items of the DLI spark.
Please follow the documentation of Spark configurations for
this argument. If you want to enable the
access metadata
of DLI spark in Flexibleengine, please set spark.dli.metaAccess.enable
to true
. Changing this parameter will submit a new spark job. - created
At String - Time of the DLI spark job submit.
- dependent
Packages List<DliSpark Job Dependent Package> - Specifies a list of package resource objects. The object structure is documented below. Changing this parameter will submit a new spark job.
- dli
Spark StringJob Id - ID of the spark job.
- driver
Cores Double - Specifies the number of CPU cores of the Spark application driver.
The default value of this value corresponds to the configuration of the selected
specification
. If you set this value instead of the default value, specification
will be invalid. Changing this parameter will submit a new spark job. - driver
Memory String - Specifies the driver memory of the spark application.
The default value of this value corresponds to the configuration of the selected
specification
. If you set this value instead of the default value, specification
will be invalid. Changing this parameter will submit a new spark job. - executor
Cores Double - Specifies the number of CPU cores of each executor in the Spark
application. The default value of this value corresponds to the configuration of the selected
specification
. If you set this value instead of the default value, specification
will be invalid. Changing this parameter will submit a new spark job. - executor
Memory String - Specifies the executor memory of the spark application. The default value of this value corresponds to the configuration of the selected
specification
. If you set this value instead of the default value, specification
will be invalid. Changing this parameter will submit a new spark job. -> NOTE: The unit must be provided, such as GB or MB.
- executors Double
- Specifies the number of executors in a spark application.
The default value of this value corresponds to the configuration of the selected
specification
. If you set this value instead of the default value, specification
will be invalid. Changing this parameter will submit a new spark job. - files List<String>
- Specifies a list of the other dependencies name which has been uploaded to the
DLI resource management system. The OBS paths are allowed, for example,
obs://<bucket name>/<dependent files>
. Changing this parameter will submit a new spark job. - jars List<String>
- Specifies a list of the jar package name which has been uploaded to the DLI
resource management system. The OBS paths are allowed, for example,
obs://<bucket name>/<package name>
. Changing this parameter will submit a new spark job. - main
Class String - Specifies the main class of the spark job.
Required if the
app_name
is the JAR type. Changing this parameter will submit a new spark job. - max
Retries Double - Specifies the maximum retry times.
The default value of this value corresponds to the configuration of the selected
specification
. If you set this value instead of the default value, specification
will be invalid. Changing this parameter will submit a new spark job. - modules List<String>
- Specifies a list of modules that depend on system resources.
The dependent modules and corresponding services are as follows.
Changing this parameter will submit a new spark job.
- sys.datasource.hbase: CloudTable/MRS HBase
- sys.datasource.opentsdb: CloudTable/MRS OpenTSDB
- sys.datasource.rds: RDS MySQL
- sys.datasource.css: CSS
- name String
- Specifies the spark job name. The value contains a maximum of 128 characters. Changing this parameter will submit a new spark job.
- owner String
- The owner of the spark job.
- python
Files List<String> - Specifies a list of the python file name which has been uploaded to the
DLI resource management system. The OBS paths are allowed, for example,
obs://<bucket name>/<python file name>
. Changing this parameter will submit a new spark job. - queue
Name String - Specifies the DLI queue name. Changing this parameter will submit a new spark job.
- region String
- Specifies the region in which to submit a spark job. If omitted, the provider-level region will be used. Changing this parameter will submit a new spark job.
- specification String
Specifies the compute resource type for spark application. The available types and related specifications are as follows, default to minimum configuration (type A). Changing this parameter will submit a new spark job.
| type | resource | driver cores | executor cores | driver memory | executor memory | num executor | | ---- | ---- | ---- | ---- | ---- | ---- | ---- | | A | 8 vCPUs, 32-GB memory | 2 | 1 | 7G | 4G | 6 | | B | 16 vCPUs, 64-GB memory | 2 | 2 | 7G | 8G | 7 | | C | 32 vCPUs, 128-GB memory | 4 | 2 | 12G | 8G | 14 |
- timeouts
Dli
Spark Job Timeouts
- app
Name string - Specifies the name of the package that is of the JAR or python file type and
has been uploaded to the DLI resource management system.
The OBS paths are allowed, for example,
obs://<bucket name>/<package name>
. Changing this parameter will submit a new spark job. - app
Parameters string - Specifies the input parameters of the main class. Changing this parameter will submit a new spark job.
- configurations {[key: string]: string}
- Specifies the configuration items of the DLI spark.
Please follow the documentation of Spark configurations for
this argument. If you want to enable the
access metadata
of DLI spark in Flexibleengine, please set spark.dli.metaAccess.enable
to true
. Changing this parameter will submit a new spark job. - created
At string - Time of the DLI spark job submit.
- dependent
Packages DliSpark Job Dependent Package[] - Specifies a list of package resource objects. The object structure is documented below. Changing this parameter will submit a new spark job.
- dli
Spark stringJob Id - ID of the spark job.
- driver
Cores number - Specifies the number of CPU cores of the Spark application driver.
The default value of this value corresponds to the configuration of the selected
specification
. If you set this value instead of the default value, specification
will be invalid. Changing this parameter will submit a new spark job. - driver
Memory string - Specifies the driver memory of the spark application.
The default value of this value corresponds to the configuration of the selected
specification
. If you set this value instead of the default value, specification
will be invalid. Changing this parameter will submit a new spark job. - executor
Cores number - Specifies the number of CPU cores of each executor in the Spark
application. The default value of this value corresponds to the configuration of the selected
specification
. If you set this value instead of the default value, specification
will be invalid. Changing this parameter will submit a new spark job. - executor
Memory string - Specifies the executor memory of the spark application. The default value of this value corresponds to the configuration of the selected
specification
. If you set this value instead of the default value, specification
will be invalid. Changing this parameter will submit a new spark job. -> NOTE: The unit must be provided, such as GB or MB.
- executors number
- Specifies the number of executors in a spark application.
The default value of this value corresponds to the configuration of the selected
specification
. If you set this value instead of the default value, specification
will be invalid. Changing this parameter will submit a new spark job. - files string[]
- Specifies a list of the other dependencies name which has been uploaded to the
DLI resource management system. The OBS paths are allowed, for example,
obs://<bucket name>/<dependent files>
. Changing this parameter will submit a new spark job. - jars string[]
- Specifies a list of the jar package name which has been uploaded to the DLI
resource management system. The OBS paths are allowed, for example,
obs://<bucket name>/<package name>
. Changing this parameter will submit a new spark job. - main
Class string - Specifies the main class of the spark job.
Required if the
app_name
is the JAR type. Changing this parameter will submit a new spark job. - max
Retries number - Specifies the maximum retry times.
The default value of this value corresponds to the configuration of the selected
specification
. If you set this value instead of the default value, specification
will be invalid. Changing this parameter will submit a new spark job. - modules string[]
- Specifies a list of modules that depend on system resources.
The dependent modules and corresponding services are as follows.
Changing this parameter will submit a new spark job.
- sys.datasource.hbase: CloudTable/MRS HBase
- sys.datasource.opentsdb: CloudTable/MRS OpenTSDB
- sys.datasource.rds: RDS MySQL
- sys.datasource.css: CSS
- name string
- Specifies the spark job name. The value contains a maximum of 128 characters. Changing this parameter will submit a new spark job.
- owner string
- The owner of the spark job.
- python
Files string[] - Specifies a list of the python file name which has been uploaded to the
DLI resource management system. The OBS paths are allowed, for example,
obs://<bucket name>/<python file name>
. Changing this parameter will submit a new spark job. - queue
Name string - Specifies the DLI queue name. Changing this parameter will submit a new spark job.
- region string
- Specifies the region in which to submit a spark job. If omitted, the provider-level region will be used. Changing this parameter will submit a new spark job.
- specification string
Specifies the compute resource type for spark application. The available types and related specifications are as follows, default to minimum configuration (type A). Changing this parameter will submit a new spark job.
| type | resource | driver cores | executor cores | driver memory | executor memory | num executor | | ---- | ---- | ---- | ---- | ---- | ---- | ---- | | A | 8 vCPUs, 32-GB memory | 2 | 1 | 7G | 4G | 6 | | B | 16 vCPUs, 64-GB memory | 2 | 2 | 7G | 8G | 7 | | C | 32 vCPUs, 128-GB memory | 4 | 2 | 12G | 8G | 14 |
- timeouts
Dli
Spark Job Timeouts
- app_
name str - Specifies the name of the package that is of the JAR or python file type and
has been uploaded to the DLI resource management system.
The OBS paths are allowed, for example,
obs://<bucket name>/<package name>
. Changing this parameter will submit a new spark job. - app_
parameters str - Specifies the input parameters of the main class. Changing this parameter will submit a new spark job.
- configurations Mapping[str, str]
- Specifies the configuration items of the DLI spark.
Please follow the documentation of Spark configurations for
this argument. If you want to enable the
access metadata
of DLI spark in Flexibleengine, please set spark.dli.metaAccess.enable
to true
. Changing this parameter will submit a new spark job. - created_
at str - Time of the DLI spark job submit.
- dependent_
packages Sequence[DliSpark Job Dependent Package Args] - Specifies a list of package resource objects. The object structure is documented below. Changing this parameter will submit a new spark job.
- dli_
spark_ strjob_ id - ID of the spark job.
- driver_
cores float - Specifies the number of CPU cores of the Spark application driver.
The default value of this value corresponds to the configuration of the selected
specification
. If you set this value instead of the default value, specification
will be invalid. Changing this parameter will submit a new spark job. - driver_
memory str - Specifies the driver memory of the spark application.
The default value of this value corresponds to the configuration of the selected
specification
. If you set this value instead of the default value, specification
will be invalid. Changing this parameter will submit a new spark job. - executor_
cores float - Specifies the number of CPU cores of each executor in the Spark
application. The default value of this value corresponds to the configuration of the selected
specification
. If you set this value instead of the default value, specification
will be invalid. Changing this parameter will submit a new spark job. - executor_
memory str - Specifies the executor memory of the spark application. The default value of this value corresponds to the configuration of the selected
specification
. If you set this value instead of the default value, specification
will be invalid. Changing this parameter will submit a new spark job. -> NOTE: The unit must be provided, such as GB or MB.
- executors float
- Specifies the number of executors in a spark application.
The default value of this value corresponds to the configuration of the selected
specification
. If you set this value instead of the default value, specification
will be invalid. Changing this parameter will submit a new spark job. - files Sequence[str]
- Specifies a list of the other dependencies name which has been uploaded to the
DLI resource management system. The OBS paths are allowed, for example,
obs://<bucket name>/<dependent files>
. Changing this parameter will submit a new spark job. - jars Sequence[str]
- Specifies a list of the jar package name which has been uploaded to the DLI
resource management system. The OBS paths are allowed, for example,
obs://<bucket name>/<package name>
. Changing this parameter will submit a new spark job. - main_
class str - Specifies the main class of the spark job.
Required if the
app_name
is the JAR type. Changing this parameter will submit a new spark job. - max_
retries float - Specifies the maximum retry times.
The default value of this value corresponds to the configuration of the selected
specification
. If you set this value instead of the default value, specification
will be invalid. Changing this parameter will submit a new spark job. - modules Sequence[str]
- Specifies a list of modules that depend on system resources.
The dependent modules and corresponding services are as follows.
Changing this parameter will submit a new spark job.
- sys.datasource.hbase: CloudTable/MRS HBase
- sys.datasource.opentsdb: CloudTable/MRS OpenTSDB
- sys.datasource.rds: RDS MySQL
- sys.datasource.css: CSS
- name str
- Specifies the spark job name. The value contains a maximum of 128 characters. Changing this parameter will submit a new spark job.
- owner str
- The owner of the spark job.
- python_
files Sequence[str] - Specifies a list of the python file name which has been uploaded to the
DLI resource management system. The OBS paths are allowed, for example,
obs://<bucket name>/<python file name>
. Changing this parameter will submit a new spark job. - queue_
name str - Specifies the DLI queue name. Changing this parameter will submit a new spark job.
- region str
- Specifies the region in which to submit a spark job. If omitted, the provider-level region will be used. Changing this parameter will submit a new spark job.
- specification str
Specifies the compute resource type for the spark application. The available types and related specifications are as follows; defaults to the minimum configuration (type A). Changing this parameter will submit a new spark job.
| type | resource | driver cores | executor cores | driver memory | executor memory | num executor | | ---- | ---- | ---- | ---- | ---- | ---- | ---- | | A | 8 vCPUs, 32-GB memory | 2 | 1 | 7G | 4G | 6 | | B | 16 vCPUs, 64-GB memory | 2 | 2 | 7G | 8G | 7 | | C | 32 vCPUs, 128-GB memory | 4 | 2 | 12G | 8G | 14 |
- timeouts
Dli
Spark Job Timeouts Args
- app
Name String - Specifies the name of the package that is of the JAR or python file type and
has been uploaded to the DLI resource management system.
The OBS paths are allowed, for example,
obs://<bucket name>/<package name>
. Changing this parameter will submit a new spark job. - app
Parameters String - Specifies the input parameters of the main class. Changing this parameter will submit a new spark job.
- configurations Map<String>
- Specifies the configuration items of the DLI spark.
Please following the document of Spark configurations for
this argument. If you want to enable the
access metadata
of DLI spark in Flexibleengine, please set spark.dli.metaAccess.enable
to true
. Changing this parameter will submit a new spark job. - created
At String - The time when the DLI spark job was submitted.
- dependent
Packages List<Property Map> - Specifies a list of package resource objects. The object structure is documented below. Changing this parameter will submit a new spark job.
- dli
Spark StringJob Id - ID of the spark job.
- driver
Cores Number - Specifies the number of CPU cores of the Spark application driver.
The default value of this value corresponds to the configuration of the selected
specification
. If you set this value instead of the default value, specification
will be invalid. Changing this parameter will submit a new spark job. - driver
Memory String - Specifies the driver memory of the spark application.
The default value of this value corresponds to the configuration of the selected
specification
. If you set this value instead of the default value, specification
will be invalid. Changing this parameter will submit a new spark job. - executor
Cores Number - Specifies the number of CPU cores of each executor in the Spark
application. The default value of this value corresponds to the configuration of the selected
specification
. If you set this value instead of the default value, specification
will be invalid. Changing this parameter will submit a new spark job. - executor
Memory String - Specifies the executor memory of the spark application. The default value of this value corresponds to the configuration of the selected
specification
. If you set this value instead of the default value, specification
will be invalid. Changing this parameter will submit a new spark job. -> NOTE: The unit must be provided, such as GB or MB.
- executors Number
- Specifies the number of executors in a spark application.
The default value of this value corresponds to the configuration of the selected
specification
. If you set this value instead of the default value, specification
will be invalid. Changing this parameter will submit a new spark job. - files List<String>
- Specifies a list of the other dependencies name which has been uploaded to the
DLI resource management system. The OBS paths are allowed, for example,
obs://<bucket name>/<dependent files>
. Changing this parameter will submit a new spark job. - jars List<String>
- Specifies a list of the jar package name which has been uploaded to the DLI
resource management system. The OBS paths are allowed, for example,
obs://<bucket name>/<package name>
. Changing this parameter will submit a new spark job. - main
Class String - Specifies the main class of the spark job.
Required if the
app_name
is the JAR type. Changing this parameter will submit a new spark job. - max
Retries Number - Specifies the maximum retry times.
The default value of this value corresponds to the configuration of the selected
specification
. If you set this value instead of the default value, specification
will be invalid. Changing this parameter will submit a new spark job. - modules List<String>
- Specifies a list of modules that depend on system resources.
The dependent modules and corresponding services are as follows.
Changing this parameter will submit a new spark job.
- sys.datasource.hbase: CloudTable/MRS HBase
- sys.datasource.opentsdb: CloudTable/MRS OpenTSDB
- sys.datasource.rds: RDS MySQL
- sys.datasource.css: CSS
- name String
- Specifies the spark job name. The value contains a maximum of 128 characters. Changing this parameter will submit a new spark job.
- owner String
- The owner of the spark job.
- python
Files List<String> - Specifies a list of the python file name which has been uploaded to the
DLI resource management system. The OBS paths are allowed, for example,
obs://<bucket name>/<python file name>
. Changing this parameter will submit a new spark job. - queue
Name String - Specifies the DLI queue name. Changing this parameter will submit a new spark job.
- region String
- Specifies the region in which to submit a spark job. If omitted, the provider-level region will be used. Changing this parameter will submit a new spark job.
- specification String
Specifies the compute resource type for the spark application. The available types and related specifications are as follows; defaults to the minimum configuration (type A). Changing this parameter will submit a new spark job.
| type | resource | driver cores | executor cores | driver memory | executor memory | num executor | | ---- | ---- | ---- | ---- | ---- | ---- | ---- | | A | 8 vCPUs, 32-GB memory | 2 | 1 | 7G | 4G | 6 | | B | 16 vCPUs, 64-GB memory | 2 | 2 | 7G | 8G | 7 | | C | 32 vCPUs, 128-GB memory | 4 | 2 | 12G | 8G | 14 |
- timeouts Property Map
Supporting Types
DliSparkJobDependentPackage, DliSparkJobDependentPackageArgs
DliSparkJobDependentPackagePackage, DliSparkJobDependentPackagePackageArgs
- Package
Name string - Specifies the resource name of the package. Changing this parameter will submit a new spark job.
- Type string
- Specifies the resource type of the package. Changing this parameter will submit a new spark job.
- Package
Name string - Specifies the resource name of the package. Changing this parameter will submit a new spark job.
- Type string
- Specifies the resource type of the package. Changing this parameter will submit a new spark job.
- package
Name String - Specifies the resource name of the package. Changing this parameter will submit a new spark job.
- type String
- Specifies the resource type of the package. Changing this parameter will submit a new spark job.
- package
Name string - Specifies the resource name of the package. Changing this parameter will submit a new spark job.
- type string
- Specifies the resource type of the package. Changing this parameter will submit a new spark job.
- package_
name str - Specifies the resource name of the package. Changing this parameter will submit a new spark job.
- type str
- Specifies the resource type of the package. Changing this parameter will submit a new spark job.
- package
Name String - Specifies the resource name of the package. Changing this parameter will submit a new spark job.
- type String
- Specifies the resource type of the package. Changing this parameter will submit a new spark job.
DliSparkJobTimeouts, DliSparkJobTimeoutsArgs
- Delete string
- Delete string
- delete String
- delete string
- delete str
- delete String
Package Details
- Repository
- flexibleengine flexibleenginecloud/terraform-provider-flexibleengine
- License
- Notes
- This Pulumi package is based on the
flexibleengine
Terraform Provider.