opentelekomcloud.MrsJobV1
Explore with Pulumi AI
An up-to-date reference of API arguments for MRS job can be found at the documentation portal.
Manages a job resource within OpenTelekomCloud MRS.
Example Usage
import * as pulumi from "@pulumi/pulumi";
import * as opentelekomcloud from "@pulumi/opentelekomcloud";
const job1 = new opentelekomcloud.MrsJobV1("job1", {
arguments: "wordcount",
clusterId: "ef43d2ff-1ecf-4f13-bd0c-0004c429a058",
input: "s3a://wordcount/input/",
jarPath: "s3a://wordcount/program/hadoop-mapreduce-examples-2.7.5.jar",
jobLog: "s3a://wordcount/log/",
jobName: "test_mapreduce_job1",
jobType: 1,
output: "s3a://wordcount/output/",
});
import pulumi
import pulumi_opentelekomcloud as opentelekomcloud
job1 = opentelekomcloud.MrsJobV1("job1",
arguments="wordcount",
cluster_id="ef43d2ff-1ecf-4f13-bd0c-0004c429a058",
input="s3a://wordcount/input/",
jar_path="s3a://wordcount/program/hadoop-mapreduce-examples-2.7.5.jar",
job_log="s3a://wordcount/log/",
job_name="test_mapreduce_job1",
job_type=1,
output="s3a://wordcount/output/")
package main
import (
"github.com/pulumi/pulumi-terraform-provider/sdks/go/opentelekomcloud/opentelekomcloud"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
_, err := opentelekomcloud.NewMrsJobV1(ctx, "job1", &opentelekomcloud.MrsJobV1Args{
Arguments: pulumi.String("wordcount"),
ClusterId: pulumi.String("ef43d2ff-1ecf-4f13-bd0c-0004c429a058"),
Input: pulumi.String("s3a://wordcount/input/"),
JarPath: pulumi.String("s3a://wordcount/program/hadoop-mapreduce-examples-2.7.5.jar"),
JobLog: pulumi.String("s3a://wordcount/log/"),
JobName: pulumi.String("test_mapreduce_job1"),
JobType: pulumi.Float64(1),
Output: pulumi.String("s3a://wordcount/output/"),
})
if err != nil {
return err
}
return nil
})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Opentelekomcloud = Pulumi.Opentelekomcloud;
return await Deployment.RunAsync(() =>
{
var job1 = new Opentelekomcloud.MrsJobV1("job1", new()
{
Arguments = "wordcount",
ClusterId = "ef43d2ff-1ecf-4f13-bd0c-0004c429a058",
Input = "s3a://wordcount/input/",
JarPath = "s3a://wordcount/program/hadoop-mapreduce-examples-2.7.5.jar",
JobLog = "s3a://wordcount/log/",
JobName = "test_mapreduce_job1",
JobType = 1,
Output = "s3a://wordcount/output/",
});
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.opentelekomcloud.MrsJobV1;
import com.pulumi.opentelekomcloud.MrsJobV1Args;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var job1 = new MrsJobV1("job1", MrsJobV1Args.builder()
.arguments("wordcount")
.clusterId("ef43d2ff-1ecf-4f13-bd0c-0004c429a058")
.input("s3a://wordcount/input/")
.jarPath("s3a://wordcount/program/hadoop-mapreduce-examples-2.7.5.jar")
.jobLog("s3a://wordcount/log/")
.jobName("test_mapreduce_job1")
.jobType(1)
.output("s3a://wordcount/output/")
.build());
}
}
resources:
job1:
type: opentelekomcloud:MrsJobV1
properties:
arguments: wordcount
clusterId: ef43d2ff-1ecf-4f13-bd0c-0004c429a058
input: s3a://wordcount/input/
jarPath: s3a://wordcount/program/hadoop-mapreduce-examples-2.7.5.jar
jobLog: s3a://wordcount/log/
jobName: test_mapreduce_job1
jobType: 1
output: s3a://wordcount/output/
Create MrsJobV1 Resource
Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.
Constructor syntax
new MrsJobV1(name: string, args: MrsJobV1Args, opts?: CustomResourceOptions);
@overload
def MrsJobV1(resource_name: str,
args: MrsJobV1Args,
opts: Optional[ResourceOptions] = None)
@overload
def MrsJobV1(resource_name: str,
opts: Optional[ResourceOptions] = None,
job_type: Optional[float] = None,
job_name: Optional[str] = None,
cluster_id: Optional[str] = None,
jar_path: Optional[str] = None,
region: Optional[str] = None,
is_public: Optional[bool] = None,
is_protected: Optional[bool] = None,
input: Optional[str] = None,
hive_script_path: Optional[str] = None,
arguments: Optional[str] = None,
mrs_job_v1_id: Optional[str] = None,
output: Optional[str] = None,
job_log: Optional[str] = None,
timeouts: Optional[MrsJobV1TimeoutsArgs] = None)
func NewMrsJobV1(ctx *Context, name string, args MrsJobV1Args, opts ...ResourceOption) (*MrsJobV1, error)
public MrsJobV1(string name, MrsJobV1Args args, CustomResourceOptions? opts = null)
public MrsJobV1(String name, MrsJobV1Args args)
public MrsJobV1(String name, MrsJobV1Args args, CustomResourceOptions options)
type: opentelekomcloud:MrsJobV1
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
Parameters
- name string
- The unique name of the resource.
- args MrsJobV1Args
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- resource_name str
- The unique name of the resource.
- args MrsJobV1Args
- The arguments to resource properties.
- opts ResourceOptions
- Bag of options to control resource's behavior.
- ctx Context
- Context object for the current deployment.
- name string
- The unique name of the resource.
- args MrsJobV1Args
- The arguments to resource properties.
- opts ResourceOption
- Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args MrsJobV1Args
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- name String
- The unique name of the resource.
- args MrsJobV1Args
- The arguments to resource properties.
- options CustomResourceOptions
- Bag of options to control resource's behavior.
Constructor example
The following reference example uses placeholder values for all input properties.
var mrsJobV1Resource = new Opentelekomcloud.MrsJobV1("mrsJobV1Resource", new()
{
JobType = 0,
JobName = "string",
ClusterId = "string",
JarPath = "string",
Region = "string",
IsPublic = false,
IsProtected = false,
Input = "string",
HiveScriptPath = "string",
Arguments = "string",
MrsJobV1Id = "string",
Output = "string",
JobLog = "string",
Timeouts = new Opentelekomcloud.Inputs.MrsJobV1TimeoutsArgs
{
Create = "string",
Delete = "string",
Update = "string",
},
});
example, err := opentelekomcloud.NewMrsJobV1(ctx, "mrsJobV1Resource", &opentelekomcloud.MrsJobV1Args{
JobType: pulumi.Float64(0),
JobName: pulumi.String("string"),
ClusterId: pulumi.String("string"),
JarPath: pulumi.String("string"),
Region: pulumi.String("string"),
IsPublic: pulumi.Bool(false),
IsProtected: pulumi.Bool(false),
Input: pulumi.String("string"),
HiveScriptPath: pulumi.String("string"),
Arguments: pulumi.String("string"),
MrsJobV1Id: pulumi.String("string"),
Output: pulumi.String("string"),
JobLog: pulumi.String("string"),
Timeouts: &opentelekomcloud.MrsJobV1TimeoutsArgs{
Create: pulumi.String("string"),
Delete: pulumi.String("string"),
Update: pulumi.String("string"),
},
})
var mrsJobV1Resource = new MrsJobV1("mrsJobV1Resource", MrsJobV1Args.builder()
.jobType(0)
.jobName("string")
.clusterId("string")
.jarPath("string")
.region("string")
.isPublic(false)
.isProtected(false)
.input("string")
.hiveScriptPath("string")
.arguments("string")
.mrsJobV1Id("string")
.output("string")
.jobLog("string")
.timeouts(MrsJobV1TimeoutsArgs.builder()
.create("string")
.delete("string")
.update("string")
.build())
.build());
mrs_job_v1_resource = opentelekomcloud.MrsJobV1("mrsJobV1Resource",
job_type=0,
job_name="string",
cluster_id="string",
jar_path="string",
region="string",
is_public=False,
is_protected=False,
input="string",
hive_script_path="string",
arguments="string",
mrs_job_v1_id="string",
output="string",
job_log="string",
timeouts={
"create": "string",
"delete": "string",
"update": "string",
})
const mrsJobV1Resource = new opentelekomcloud.MrsJobV1("mrsJobV1Resource", {
jobType: 0,
jobName: "string",
clusterId: "string",
jarPath: "string",
region: "string",
isPublic: false,
isProtected: false,
input: "string",
hiveScriptPath: "string",
arguments: "string",
mrsJobV1Id: "string",
output: "string",
jobLog: "string",
timeouts: {
create: "string",
"delete": "string",
update: "string",
},
});
type: opentelekomcloud:MrsJobV1
properties:
arguments: string
clusterId: string
hiveScriptPath: string
input: string
isProtected: false
isPublic: false
jarPath: string
jobLog: string
jobName: string
jobType: 0
mrsJobV1Id: string
output: string
region: string
timeouts:
create: string
delete: string
update: string
MrsJobV1 Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
The MrsJobV1 resource accepts the following input properties:
- Cluster
Id string - Cluster ID
- Jar
Path string - Path of the .jar package or .sql file for program execution The parameter must meet the following requirements: Contains a maximum of 1023 characters, excluding special characters such as ;|&><'$. The address cannot be empty or full of spaces. Starts with / or s3a://. Spark Script must end with .sql; while MapReduce and Spark Jar must end with .jar. sql and jar are case-insensitive.
- Job
Name string Job name Contains only 1 to 64 letters, digits, hyphens (-), and underscores (_).
Note: Identical job names are allowed but not recommended.
- Job
Type double Job type
- 1: MapReduce
- 2: Spark
- 3: Hive Script
- 4: HiveQL (not supported currently)
- 5: DistCp, importing and exporting data.
- 6: Spark Script
- 7: Spark SQL, submitting Spark SQL statements. (not supported in this API currently)
Note: Spark and Hive jobs can be added to only clusters including Spark and Hive components.
- Arguments string
- Key parameter for program execution. The parameter is specified by the function of the user's program. MRS is only responsible for loading the parameter. The parameter contains a maximum of 2047 characters, excluding special characters such as ;|&>'<$, and can be empty.
- Hive
Script Path string - SQL program path This parameter is needed by Spark Script and Hive Script jobs only and must meet the following requirements: Contains a maximum of 1023 characters, excluding special characters such as ;|&><'$. The address cannot be empty or full of spaces. Starts with / or s3a://. Ends with .sql. sql is case-insensitive.
- Input string
- Path for inputting data, which must start with / or s3a://. A correct OBS path is required. The parameter contains a maximum of 1023 characters, excluding special characters such as ;|&>'<$, and can be empty.
- Is
Protected bool - Whether a job is protected true false The current version does not support this function.
- Is
Public bool - Whether a job is public true false The current version does not support this function.
- Job
Log string - Path for storing job logs that record job running status. This path must start with / or s3a://. A correct OBS path is required. The parameter contains a maximum of 1023 characters, excluding special characters such as ;|&>'<$, and can be empty.
- Mrs
Job V1Id string - Output string
- Path for outputting data, which must start with / or s3a://. A correct OBS path is required. If the path does not exist, the system automatically creates it. The parameter contains a maximum of 1023 characters, excluding special characters such as ;|&>'<$, and can be empty.
- Region string
- Timeouts
Mrs
Job V1Timeouts
- Cluster
Id string - Cluster ID
- Jar
Path string - Path of the .jar package or .sql file for program execution The parameter must meet the following requirements: Contains a maximum of 1023 characters, excluding special characters such as ;|&><'$. The address cannot be empty or full of spaces. Starts with / or s3a://. Spark Script must end with .sql; while MapReduce and Spark Jar must end with .jar. sql and jar are case-insensitive.
- Job
Name string Job name Contains only 1 to 64 letters, digits, hyphens (-), and underscores (_).
Note: Identical job names are allowed but not recommended.
- Job
Type float64 Job type
- 1: MapReduce
- 2: Spark
- 3: Hive Script
- 4: HiveQL (not supported currently)
- 5: DistCp, importing and exporting data.
- 6: Spark Script
- 7: Spark SQL, submitting Spark SQL statements. (not supported in this API currently)
Note: Spark and Hive jobs can be added to only clusters including Spark and Hive components.
- Arguments string
- Key parameter for program execution. The parameter is specified by the function of the user's program. MRS is only responsible for loading the parameter. The parameter contains a maximum of 2047 characters, excluding special characters such as ;|&>'<$, and can be empty.
- Hive
Script Path string - SQL program path This parameter is needed by Spark Script and Hive Script jobs only and must meet the following requirements: Contains a maximum of 1023 characters, excluding special characters such as ;|&><'$. The address cannot be empty or full of spaces. Starts with / or s3a://. Ends with .sql. sql is case-insensitive.
- Input string
- Path for inputting data, which must start with / or s3a://. A correct OBS path is required. The parameter contains a maximum of 1023 characters, excluding special characters such as ;|&>'<$, and can be empty.
- Is
Protected bool - Whether a job is protected true false The current version does not support this function.
- Is
Public bool - Whether a job is public true false The current version does not support this function.
- Job
Log string - Path for storing job logs that record job running status. This path must start with / or s3a://. A correct OBS path is required. The parameter contains a maximum of 1023 characters, excluding special characters such as ;|&>'<$, and can be empty.
- Mrs
Job V1Id string - Output string
- Path for outputting data, which must start with / or s3a://. A correct OBS path is required. If the path does not exist, the system automatically creates it. The parameter contains a maximum of 1023 characters, excluding special characters such as ;|&>'<$, and can be empty.
- Region string
- Timeouts
Mrs
Job V1Timeouts Args
- cluster
Id String - Cluster ID
- jar
Path String - Path of the .jar package or .sql file for program execution The parameter must meet the following requirements: Contains a maximum of 1023 characters, excluding special characters such as ;|&><'$. The address cannot be empty or full of spaces. Starts with / or s3a://. Spark Script must end with .sql; while MapReduce and Spark Jar must end with .jar. sql and jar are case-insensitive.
- job
Name String Job name Contains only 1 to 64 letters, digits, hyphens (-), and underscores (_).
Note: Identical job names are allowed but not recommended.
- job
Type Double Job type
- 1: MapReduce
- 2: Spark
- 3: Hive Script
- 4: HiveQL (not supported currently)
- 5: DistCp, importing and exporting data.
- 6: Spark Script
- 7: Spark SQL, submitting Spark SQL statements. (not supported in this API currently)
Note: Spark and Hive jobs can be added to only clusters including Spark and Hive components.
- arguments String
- Key parameter for program execution. The parameter is specified by the function of the user's program. MRS is only responsible for loading the parameter. The parameter contains a maximum of 2047 characters, excluding special characters such as ;|&>'<$, and can be empty.
- hive
Script Path String - SQL program path This parameter is needed by Spark Script and Hive Script jobs only and must meet the following requirements: Contains a maximum of 1023 characters, excluding special characters such as ;|&><'$. The address cannot be empty or full of spaces. Starts with / or s3a://. Ends with .sql. sql is case-insensitive.
- input String
- Path for inputting data, which must start with / or s3a://. A correct OBS path is required. The parameter contains a maximum of 1023 characters, excluding special characters such as ;|&>'<$, and can be empty.
- is
Protected Boolean - Whether a job is protected true false The current version does not support this function.
- is
Public Boolean - Whether a job is public true false The current version does not support this function.
- job
Log String - Path for storing job logs that record job running status. This path must start with / or s3a://. A correct OBS path is required. The parameter contains a maximum of 1023 characters, excluding special characters such as ;|&>'<$, and can be empty.
- mrs
Job V1Id String - output String
- Path for outputting data, which must start with / or s3a://. A correct OBS path is required. If the path does not exist, the system automatically creates it. The parameter contains a maximum of 1023 characters, excluding special characters such as ;|&>'<$, and can be empty.
- region String
- timeouts
Mrs
Job V1Timeouts
- cluster
Id string - Cluster ID
- jar
Path string - Path of the .jar package or .sql file for program execution The parameter must meet the following requirements: Contains a maximum of 1023 characters, excluding special characters such as ;|&><'$. The address cannot be empty or full of spaces. Starts with / or s3a://. Spark Script must end with .sql; while MapReduce and Spark Jar must end with .jar. sql and jar are case-insensitive.
- job
Name string Job name Contains only 1 to 64 letters, digits, hyphens (-), and underscores (_).
Note: Identical job names are allowed but not recommended.
- job
Type number Job type
- 1: MapReduce
- 2: Spark
- 3: Hive Script
- 4: HiveQL (not supported currently)
- 5: DistCp, importing and exporting data.
- 6: Spark Script
- 7: Spark SQL, submitting Spark SQL statements. (not supported in this API currently)
Note: Spark and Hive jobs can be added to only clusters including Spark and Hive components.
- arguments string
- Key parameter for program execution. The parameter is specified by the function of the user's program. MRS is only responsible for loading the parameter. The parameter contains a maximum of 2047 characters, excluding special characters such as ;|&>'<$, and can be empty.
- hive
Script Path string - SQL program path This parameter is needed by Spark Script and Hive Script jobs only and must meet the following requirements: Contains a maximum of 1023 characters, excluding special characters such as ;|&><'$. The address cannot be empty or full of spaces. Starts with / or s3a://. Ends with .sql. sql is case-insensitive.
- input string
- Path for inputting data, which must start with / or s3a://. A correct OBS path is required. The parameter contains a maximum of 1023 characters, excluding special characters such as ;|&>'<$, and can be empty.
- is
Protected boolean - Whether a job is protected true false The current version does not support this function.
- is
Public boolean - Whether a job is public true false The current version does not support this function.
- job
Log string - Path for storing job logs that record job running status. This path must start with / or s3a://. A correct OBS path is required. The parameter contains a maximum of 1023 characters, excluding special characters such as ;|&>'<$, and can be empty.
- mrs
Job V1Id string - output string
- Path for outputting data, which must start with / or s3a://. A correct OBS path is required. If the path does not exist, the system automatically creates it. The parameter contains a maximum of 1023 characters, excluding special characters such as ;|&>'<$, and can be empty.
- region string
- timeouts
Mrs
Job V1Timeouts
- cluster_
id str - Cluster ID
- jar_
path str - Path of the .jar package or .sql file for program execution The parameter must meet the following requirements: Contains a maximum of 1023 characters, excluding special characters such as ;|&><'$. The address cannot be empty or full of spaces. Starts with / or s3a://. Spark Script must end with .sql; while MapReduce and Spark Jar must end with .jar. sql and jar are case-insensitive.
- job_
name str Job name Contains only 1 to 64 letters, digits, hyphens (-), and underscores (_).
Note: Identical job names are allowed but not recommended.
- job_
type float Job type
- 1: MapReduce
- 2: Spark
- 3: Hive Script
- 4: HiveQL (not supported currently)
- 5: DistCp, importing and exporting data.
- 6: Spark Script
- 7: Spark SQL, submitting Spark SQL statements. (not supported in this API currently)
Note: Spark and Hive jobs can be added to only clusters including Spark and Hive components.
- arguments str
- Key parameter for program execution. The parameter is specified by the function of the user's program. MRS is only responsible for loading the parameter. The parameter contains a maximum of 2047 characters, excluding special characters such as ;|&>'<$, and can be empty.
- hive_
script_ path str - SQL program path This parameter is needed by Spark Script and Hive Script jobs only and must meet the following requirements: Contains a maximum of 1023 characters, excluding special characters such as ;|&><'$. The address cannot be empty or full of spaces. Starts with / or s3a://. Ends with .sql. sql is case-insensitive.
- input str
- Path for inputting data, which must start with / or s3a://. A correct OBS path is required. The parameter contains a maximum of 1023 characters, excluding special characters such as ;|&>'<$, and can be empty.
- is_
protected bool - Whether a job is protected true false The current version does not support this function.
- is_
public bool - Whether a job is public true false The current version does not support this function.
- job_
log str - Path for storing job logs that record job running status. This path must start with / or s3a://. A correct OBS path is required. The parameter contains a maximum of 1023 characters, excluding special characters such as ;|&>'<$, and can be empty.
- mrs_
job_ v1_ id str - output str
- Path for outputting data, which must start with / or s3a://. A correct OBS path is required. If the path does not exist, the system automatically creates it. The parameter contains a maximum of 1023 characters, excluding special characters such as ;|&>'<$, and can be empty.
- region str
- timeouts
Mrs
Job V1Timeouts Args
- cluster
Id String - Cluster ID
- jar
Path String - Path of the .jar package or .sql file for program execution The parameter must meet the following requirements: Contains a maximum of 1023 characters, excluding special characters such as ;|&><'$. The address cannot be empty or full of spaces. Starts with / or s3a://. Spark Script must end with .sql; while MapReduce and Spark Jar must end with .jar. sql and jar are case-insensitive.
- job
Name String Job name Contains only 1 to 64 letters, digits, hyphens (-), and underscores (_).
Note: Identical job names are allowed but not recommended.
- job
Type Number Job type
- 1: MapReduce
- 2: Spark
- 3: Hive Script
- 4: HiveQL (not supported currently)
- 5: DistCp, importing and exporting data.
- 6: Spark Script
- 7: Spark SQL, submitting Spark SQL statements. (not supported in this API currently)
Note: Spark and Hive jobs can be added to only clusters including Spark and Hive components.
- arguments String
- Key parameter for program execution. The parameter is specified by the function of the user's program. MRS is only responsible for loading the parameter. The parameter contains a maximum of 2047 characters, excluding special characters such as ;|&>'<$, and can be empty.
- hive
Script Path String - SQL program path This parameter is needed by Spark Script and Hive Script jobs only and must meet the following requirements: Contains a maximum of 1023 characters, excluding special characters such as ;|&><'$. The address cannot be empty or full of spaces. Starts with / or s3a://. Ends with .sql. sql is case-insensitive.
- input String
- Path for inputting data, which must start with / or s3a://. A correct OBS path is required. The parameter contains a maximum of 1023 characters, excluding special characters such as ;|&>'<$, and can be empty.
- is
Protected Boolean - Whether a job is protected true false The current version does not support this function.
- is
Public Boolean - Whether a job is public true false The current version does not support this function.
- job
Log String - Path for storing job logs that record job running status. This path must start with / or s3a://. A correct OBS path is required. The parameter contains a maximum of 1023 characters, excluding special characters such as ;|&>'<$, and can be empty.
- mrs
Job V1Id String - output String
- Path for outputting data, which must start with / or s3a://. A correct OBS path is required. If the path does not exist, the system automatically creates it. The parameter contains a maximum of 1023 characters, excluding special characters such as ;|&>'<$, and can be empty.
- region String
- timeouts Property Map
Outputs
All input properties are implicitly available as output properties. Additionally, the MrsJobV1 resource produces the following output properties:
Look up Existing MrsJobV1 Resource
Get an existing MrsJobV1 resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: MrsJobV1State, opts?: CustomResourceOptions): MrsJobV1
@staticmethod
def get(resource_name: str,
id: str,
opts: Optional[ResourceOptions] = None,
arguments: Optional[str] = None,
cluster_id: Optional[str] = None,
hive_script_path: Optional[str] = None,
input: Optional[str] = None,
is_protected: Optional[bool] = None,
is_public: Optional[bool] = None,
jar_path: Optional[str] = None,
job_log: Optional[str] = None,
job_name: Optional[str] = None,
job_state: Optional[str] = None,
job_type: Optional[float] = None,
mrs_job_v1_id: Optional[str] = None,
output: Optional[str] = None,
region: Optional[str] = None,
timeouts: Optional[MrsJobV1TimeoutsArgs] = None) -> MrsJobV1
func GetMrsJobV1(ctx *Context, name string, id IDInput, state *MrsJobV1State, opts ...ResourceOption) (*MrsJobV1, error)
public static MrsJobV1 Get(string name, Input<string> id, MrsJobV1State? state, CustomResourceOptions? opts = null)
public static MrsJobV1 get(String name, Output<String> id, MrsJobV1State state, CustomResourceOptions options)
resources:
  _:
    type: opentelekomcloud:MrsJobV1
    get:
      id: ${id}
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- resource_name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- Arguments string
- Key parameter for program execution. The parameter is specified by the function of the user's program. MRS is only responsible for loading the parameter. The parameter contains a maximum of 2047 characters, excluding special characters such as ;|&>'<$, and can be empty.
- Cluster
Id string - Cluster ID
- HiveScriptPath string
- SQL program path. This parameter is needed by Spark Script and Hive Script jobs only and must meet the following requirements: Contains a maximum of 1023 characters, excluding special characters such as ;|&><'$. The address cannot be empty or full of spaces. Starts with / or s3a://. Ends with .sql. sql is case-insensitive.
- Input string
- Path for inputting data, which must start with / or s3a://. A correct OBS path is required. The parameter contains a maximum of 1023 characters, excluding special characters such as ;|&>'<$, and can be empty.
- Is
Protected bool - Whether a job is protected true false The current version does not support this function.
- Is
Public bool - Whether a job is public true false The current version does not support this function.
- Jar
Path string - Path of the .jar package or .sql file for program execution The parameter must meet the following requirements: Contains a maximum of 1023 characters, excluding special characters such as ;|&><'$. The address cannot be empty or full of spaces. Starts with / or s3a://. Spark Script must end with .sql; while MapReduce and Spark Jar must end with .jar. sql and jar are case-insensitive.
- Job
Log string - Path for storing job logs that record job running status. This path must start with / or s3a://. A correct OBS path is required. The parameter contains a maximum of 1023 characters, excluding special characters such as ;|&>'<$, and can be empty.
- Job
Name string Job name Contains only 1 to 64 letters, digits, hyphens (-), and underscores (_).
Note: Identical job names are allowed but not recommended.
- Job
State string - Job
Type double Job type
- 1: MapReduce
- 2: Spark
- 3: Hive Script
- 4: HiveQL (not supported currently)
- 5: DistCp, importing and exporting data.
- 6: Spark Script
- 7: Spark SQL, submitting Spark SQL statements. (not supported in this API currently)
Note: Spark and Hive jobs can be added to only clusters including Spark and Hive components.
- MrsJobV1Id string
- Output string
- Path for outputting data, which must start with / or s3a://. A correct OBS path is required. If the path does not exist, the system automatically creates it. The parameter contains a maximum of 1023 characters, excluding special characters such as ;|&>'<$, and can be empty.
- Region string
- Timeouts
Mrs
Job V1Timeouts
- Arguments string
- Key parameter for program execution. The parameter is specified by the function of the user's program. MRS is only responsible for loading the parameter. The parameter contains a maximum of 2047 characters, excluding special characters such as ;|&>'<$, and can be empty.
- Cluster
Id string - Cluster ID
- HiveScriptPath string
- SQL program path. This parameter is needed by Spark Script and Hive Script jobs only and must meet the following requirements: Contains a maximum of 1023 characters, excluding special characters such as ;|&><'$. The address cannot be empty or full of spaces. Starts with / or s3a://. Ends with .sql. sql is case-insensitive.
- Input string
- Path for inputting data, which must start with / or s3a://. A correct OBS path is required. The parameter contains a maximum of 1023 characters, excluding special characters such as ;|&>'<$, and can be empty.
- Is
Protected bool - Whether a job is protected true false The current version does not support this function.
- Is
Public bool - Whether a job is public true false The current version does not support this function.
- Jar
Path string - Path of the .jar package or .sql file for program execution The parameter must meet the following requirements: Contains a maximum of 1023 characters, excluding special characters such as ;|&><'$. The address cannot be empty or full of spaces. Starts with / or s3a://. Spark Script must end with .sql; while MapReduce and Spark Jar must end with .jar. sql and jar are case-insensitive.
- Job
Log string - Path for storing job logs that record job running status. This path must start with / or s3a://. A correct OBS path is required. The parameter contains a maximum of 1023 characters, excluding special characters such as ;|&>'<$, and can be empty.
- Job
Name string Job name Contains only 1 to 64 letters, digits, hyphens (-), and underscores (_).
Note: Identical job names are allowed but not recommended.
- Job
State string - Job
Type float64 Job type
- 1: MapReduce
- 2: Spark
- 3: Hive Script
- 4: HiveQL (not supported currently)
- 5: DistCp, importing and exporting data.
- 6: Spark Script
- 7: Spark SQL, submitting Spark SQL statements. (not supported in this API currently)
Note: Spark and Hive jobs can be added to only clusters including Spark and Hive components.
- MrsJobV1Id string
- Output string
- Path for outputting data, which must start with / or s3a://. A correct OBS path is required. If the path does not exist, the system automatically creates it. The parameter contains a maximum of 1023 characters, excluding special characters such as ;|&>'<$, and can be empty.
- Region string
- Timeouts
Mrs
Job V1Timeouts Args
- arguments String
- Key parameter for program execution. The parameter is specified by the function of the user's program. MRS is only responsible for loading the parameter. The parameter contains a maximum of 2047 characters, excluding special characters such as ;|&>'<$, and can be empty.
- cluster
Id String - Cluster ID
- hiveScriptPath String
- SQL program path. This parameter is needed by Spark Script and Hive Script jobs only and must meet the following requirements: Contains a maximum of 1023 characters, excluding special characters such as ;|&><'$. The address cannot be empty or full of spaces. Starts with / or s3a://. Ends with .sql. sql is case-insensitive.
- input String
- Path for inputting data, which must start with / or s3a://. A correct OBS path is required. The parameter contains a maximum of 1023 characters, excluding special characters such as ;|&>'<$, and can be empty.
- is
Protected Boolean - Whether a job is protected true false The current version does not support this function.
- is
Public Boolean - Whether a job is public true false The current version does not support this function.
- jar
Path String - Path of the .jar package or .sql file for program execution The parameter must meet the following requirements: Contains a maximum of 1023 characters, excluding special characters such as ;|&><'$. The address cannot be empty or full of spaces. Starts with / or s3a://. Spark Script must end with .sql; while MapReduce and Spark Jar must end with .jar. sql and jar are case-insensitive.
- job
Log String - Path for storing job logs that record job running status. This path must start with / or s3a://. A correct OBS path is required. The parameter contains a maximum of 1023 characters, excluding special characters such as ;|&>'<$, and can be empty.
- job
Name String Job name Contains only 1 to 64 letters, digits, hyphens (-), and underscores (_).
Note: Identical job names are allowed but not recommended.
- job
State String - job
Type Double Job type
- 1: MapReduce
- 2: Spark
- 3: Hive Script
- 4: HiveQL (not supported currently)
- 5: DistCp, importing and exporting data.
- 6: Spark Script
- 7: Spark SQL, submitting Spark SQL statements. (not supported in this API currently)
Note: Spark and Hive jobs can be added to only clusters including Spark and Hive components.
- mrsJobV1Id String
- output String
- Path for outputting data, which must start with / or s3a://. A correct OBS path is required. If the path does not exist, the system automatically creates it. The parameter contains a maximum of 1023 characters, excluding special characters such as ;|&>'<$, and can be empty.
- region String
- timeouts
Mrs
Job V1Timeouts
- arguments string
- Key parameter for program execution. The parameter is specified by the function of the user's program. MRS is only responsible for loading the parameter. The parameter contains a maximum of 2047 characters, excluding special characters such as ;|&>'<$, and can be empty.
- cluster
Id string - Cluster ID
- hiveScriptPath string
- SQL program path. This parameter is needed by Spark Script and Hive Script jobs only and must meet the following requirements: Contains a maximum of 1023 characters, excluding special characters such as ;|&><'$. The address cannot be empty or full of spaces. Starts with / or s3a://. Ends with .sql. sql is case-insensitive.
- input string
- Path for inputting data, which must start with / or s3a://. A correct OBS path is required. The parameter contains a maximum of 1023 characters, excluding special characters such as ;|&>'<$, and can be empty.
- is
Protected boolean - Whether a job is protected true false The current version does not support this function.
- is
Public boolean - Whether a job is public true false The current version does not support this function.
- jar
Path string - Path of the .jar package or .sql file for program execution The parameter must meet the following requirements: Contains a maximum of 1023 characters, excluding special characters such as ;|&><'$. The address cannot be empty or full of spaces. Starts with / or s3a://. Spark Script must end with .sql; while MapReduce and Spark Jar must end with .jar. sql and jar are case-insensitive.
- job
Log string - Path for storing job logs that record job running status. This path must start with / or s3a://. A correct OBS path is required. The parameter contains a maximum of 1023 characters, excluding special characters such as ;|&>'<$, and can be empty.
- job
Name string Job name Contains only 1 to 64 letters, digits, hyphens (-), and underscores (_).
Note: Identical job names are allowed but not recommended.
- job
State string - job
Type number Job type
- 1: MapReduce
- 2: Spark
- 3: Hive Script
- 4: HiveQL (not supported currently)
- 5: DistCp, importing and exporting data.
- 6: Spark Script
- 7: Spark SQL, submitting Spark SQL statements. (not supported in this API currently)
Note: Spark and Hive jobs can be added to only clusters including Spark and Hive components.
- mrsJobV1Id string
- output string
- Path for outputting data, which must start with / or s3a://. A correct OBS path is required. If the path does not exist, the system automatically creates it. The parameter contains a maximum of 1023 characters, excluding special characters such as ;|&>'<$, and can be empty.
- region string
- timeouts
Mrs
Job V1Timeouts
- arguments str
- Key parameter for program execution. The parameter is specified by the function of the user's program. MRS is only responsible for loading the parameter. The parameter contains a maximum of 2047 characters, excluding special characters such as ;|&>'<$, and can be empty.
- cluster_
id str - Cluster ID
- hive_script_path str
- SQL program path. This parameter is needed by Spark Script and Hive Script jobs only and must meet the following requirements: Contains a maximum of 1023 characters, excluding special characters such as ;|&><'$. The address cannot be empty or full of spaces. Starts with / or s3a://. Ends with .sql. sql is case-insensitive.
- input str
- Path for inputting data, which must start with / or s3a://. A correct OBS path is required. The parameter contains a maximum of 1023 characters, excluding special characters such as ;|&>'<$, and can be empty.
- is_
protected bool - Whether a job is protected true false The current version does not support this function.
- is_
public bool - Whether a job is public true false The current version does not support this function.
- jar_
path str - Path of the .jar package or .sql file for program execution The parameter must meet the following requirements: Contains a maximum of 1023 characters, excluding special characters such as ;|&><'$. The address cannot be empty or full of spaces. Starts with / or s3a://. Spark Script must end with .sql; while MapReduce and Spark Jar must end with .jar. sql and jar are case-insensitive.
- job_
log str - Path for storing job logs that record job running status. This path must start with / or s3a://. A correct OBS path is required. The parameter contains a maximum of 1023 characters, excluding special characters such as ;|&>'<$, and can be empty.
- job_
name str Job name Contains only 1 to 64 letters, digits, hyphens (-), and underscores (_).
Note: Identical job names are allowed but not recommended.
- job_
state str - job_
type float Job type
- 1: MapReduce
- 2: Spark
- 3: Hive Script
- 4: HiveQL (not supported currently)
- 5: DistCp, importing and exporting data.
- 6: Spark Script
- 7: Spark SQL, submitting Spark SQL statements. (not supported in this API currently)
Note: Spark and Hive jobs can be added to only clusters including Spark and Hive components.
- mrs_job_v1_id str
- output str
- Path for outputting data, which must start with / or s3a://. A correct OBS path is required. If the path does not exist, the system automatically creates it. The parameter contains a maximum of 1023 characters, excluding special characters such as ;|&>'<$, and can be empty.
- region str
- timeouts
Mrs
Job V1Timeouts Args
- arguments String
- Key parameter for program execution. The parameter is specified by the function of the user's program. MRS is only responsible for loading the parameter. The parameter contains a maximum of 2047 characters, excluding special characters such as ;|&>'<$, and can be empty.
- cluster
Id String - Cluster ID
- hiveScriptPath String
- SQL program path. This parameter is needed by Spark Script and Hive Script jobs only and must meet the following requirements: Contains a maximum of 1023 characters, excluding special characters such as ;|&><'$. The address cannot be empty or full of spaces. Starts with / or s3a://. Ends with .sql. sql is case-insensitive.
- input String
- Path for inputting data, which must start with / or s3a://. A correct OBS path is required. The parameter contains a maximum of 1023 characters, excluding special characters such as ;|&>'<$, and can be empty.
- is
Protected Boolean - Whether a job is protected true false The current version does not support this function.
- is
Public Boolean - Whether a job is public true false The current version does not support this function.
- jar
Path String - Path of the .jar package or .sql file for program execution The parameter must meet the following requirements: Contains a maximum of 1023 characters, excluding special characters such as ;|&><'$. The address cannot be empty or full of spaces. Starts with / or s3a://. Spark Script must end with .sql; while MapReduce and Spark Jar must end with .jar. sql and jar are case-insensitive.
- job
Log String - Path for storing job logs that record job running status. This path must start with / or s3a://. A correct OBS path is required. The parameter contains a maximum of 1023 characters, excluding special characters such as ;|&>'<$, and can be empty.
- job
Name String Job name Contains only 1 to 64 letters, digits, hyphens (-), and underscores (_).
Note: Identical job names are allowed but not recommended.
- job
State String - job
Type Number Job type
- 1: MapReduce
- 2: Spark
- 3: Hive Script
- 4: HiveQL (not supported currently)
- 5: DistCp, importing and exporting data.
- 6: Spark Script
- 7: Spark SQL, submitting Spark SQL statements. (not supported in this API currently)
Note: Spark and Hive jobs can be added to only clusters including Spark and Hive components.
- mrsJobV1Id String
- output String
- Path for outputting data, which must start with / or s3a://. A correct OBS path is required. If the path does not exist, the system automatically creates it. The parameter contains a maximum of 1023 characters, excluding special characters such as ;|&>'<$, and can be empty.
- region String
- timeouts Property Map
Supporting Types
MrsJobV1Timeouts, MrsJobV1TimeoutsArgs
Package Details
- Repository
- opentelekomcloud opentelekomcloud/terraform-provider-opentelekomcloud
- License
- Notes
- This Pulumi package is based on the
opentelekomcloud
Terraform Provider.