Google Cloud (GCP) Classic

Pulumi Official
Package maintained by Pulumi
v6.28.0 published on Friday, Jun 17, 2022 by Pulumi

Job

Manages a job resource within a Dataproc cluster on Google Cloud. For more information, see the official Dataproc documentation.

!> Note: This resource does not support 'update'; changing any attribute will cause the resource to be recreated.

Example Usage

using Pulumi;
using Gcp = Pulumi.Gcp;

class MyStack : Stack
{
    public MyStack()
    {
        var mycluster = new Gcp.Dataproc.Cluster("mycluster", new Gcp.Dataproc.ClusterArgs
        {
            Region = "us-central1",
        });
        // Submit an example spark job to a dataproc cluster
        var spark = new Gcp.Dataproc.Job("spark", new Gcp.Dataproc.JobArgs
        {
            Region = mycluster.Region,
            ForceDelete = true,
            Placement = new Gcp.Dataproc.Inputs.JobPlacementArgs
            {
                ClusterName = mycluster.Name,
            },
            SparkConfig = new Gcp.Dataproc.Inputs.JobSparkConfigArgs
            {
                MainClass = "org.apache.spark.examples.SparkPi",
                JarFileUris = 
                {
                    "file:///usr/lib/spark/examples/jars/spark-examples.jar",
                },
                Args = 
                {
                    "1000",
                },
                Properties = 
                {
                    { "spark.logConf", "true" },
                },
                LoggingConfig = new Gcp.Dataproc.Inputs.JobSparkConfigLoggingConfigArgs
                {
                    DriverLogLevels = 
                    {
                        { "root", "INFO" },
                    },
                },
            },
        });
        // Submit an example pyspark job to a dataproc cluster
        var pyspark = new Gcp.Dataproc.Job("pyspark", new Gcp.Dataproc.JobArgs
        {
            Region = mycluster.Region,
            ForceDelete = true,
            Placement = new Gcp.Dataproc.Inputs.JobPlacementArgs
            {
                ClusterName = mycluster.Name,
            },
            PysparkConfig = new Gcp.Dataproc.Inputs.JobPysparkConfigArgs
            {
                MainPythonFileUri = "gs://dataproc-examples-2f10d78d114f6aaec76462e3c310f31f/src/pyspark/hello-world/hello-world.py",
                Properties = 
                {
                    { "spark.logConf", "true" },
                },
            },
        });
        this.SparkStatus = spark.Statuses.Apply(statuses => statuses[0].State);
        this.PysparkStatus = pyspark.Statuses.Apply(statuses => statuses[0].State);
    }

    [Output("sparkStatus")]
    public Output<string> SparkStatus { get; set; }
    [Output("pysparkStatus")]
    public Output<string> PysparkStatus { get; set; }
}
package main

import (
	"github.com/pulumi/pulumi-gcp/sdk/v6/go/gcp/dataproc"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		mycluster, err := dataproc.NewCluster(ctx, "mycluster", &dataproc.ClusterArgs{
			Region: pulumi.String("us-central1"),
		})
		if err != nil {
			return err
		}
		spark, err := dataproc.NewJob(ctx, "spark", &dataproc.JobArgs{
			Region:      mycluster.Region,
			ForceDelete: pulumi.Bool(true),
			Placement: &dataproc.JobPlacementArgs{
				ClusterName: mycluster.Name,
			},
			SparkConfig: &dataproc.JobSparkConfigArgs{
				MainClass: pulumi.String("org.apache.spark.examples.SparkPi"),
				JarFileUris: pulumi.StringArray{
					pulumi.String("file:///usr/lib/spark/examples/jars/spark-examples.jar"),
				},
				Args: pulumi.StringArray{
					pulumi.String("1000"),
				},
				Properties: pulumi.StringMap{
					"spark.logConf": pulumi.String("true"),
				},
				LoggingConfig: &dataproc.JobSparkConfigLoggingConfigArgs{
					DriverLogLevels: pulumi.StringMap{
						"root": pulumi.String("INFO"),
					},
				},
			},
		})
		if err != nil {
			return err
		}
		pyspark, err := dataproc.NewJob(ctx, "pyspark", &dataproc.JobArgs{
			Region:      mycluster.Region,
			ForceDelete: pulumi.Bool(true),
			Placement: &dataproc.JobPlacementArgs{
				ClusterName: mycluster.Name,
			},
			PysparkConfig: &dataproc.JobPysparkConfigArgs{
				MainPythonFileUri: pulumi.String("gs://dataproc-examples-2f10d78d114f6aaec76462e3c310f31f/src/pyspark/hello-world/hello-world.py"),
				Properties: pulumi.StringMap{
					"spark.logConf": pulumi.String("true"),
				},
			},
		})
		if err != nil {
			return err
		}
		ctx.Export("sparkStatus", spark.Statuses.ApplyT(func(statuses []dataproc.JobStatus) (string, error) {
			return statuses[0].State, nil
		}).(pulumi.StringOutput))
		ctx.Export("pysparkStatus", pyspark.Statuses.ApplyT(func(statuses []dataproc.JobStatus) (string, error) {
			return statuses[0].State, nil
		}).(pulumi.StringOutput))
		return nil
	})
}
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.gcp.dataproc.Cluster;
import com.pulumi.gcp.dataproc.ClusterArgs;
import com.pulumi.gcp.dataproc.Job;
import com.pulumi.gcp.dataproc.JobArgs;
import com.pulumi.gcp.dataproc.inputs.JobPlacementArgs;
import com.pulumi.gcp.dataproc.inputs.JobSparkConfigArgs;
import com.pulumi.gcp.dataproc.inputs.JobSparkConfigLoggingConfigArgs;
import com.pulumi.gcp.dataproc.inputs.JobPysparkConfigArgs;
import java.util.Map;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var mycluster = new Cluster("mycluster", ClusterArgs.builder()
            .region("us-central1")
            .build());

        // Submit an example spark job to a dataproc cluster
        var spark = new Job("spark", JobArgs.builder()
            .region(mycluster.region())
            .forceDelete(true)
            .placement(JobPlacementArgs.builder()
                .clusterName(mycluster.name())
                .build())
            .sparkConfig(JobSparkConfigArgs.builder()
                .mainClass("org.apache.spark.examples.SparkPi")
                .jarFileUris("file:///usr/lib/spark/examples/jars/spark-examples.jar")
                .args("1000")
                .properties(Map.of("spark.logConf", "true"))
                .loggingConfig(JobSparkConfigLoggingConfigArgs.builder()
                    .driverLogLevels(Map.of("root", "INFO"))
                    .build())
                .build())
            .build());

        // Submit an example pyspark job to a dataproc cluster
        var pyspark = new Job("pyspark", JobArgs.builder()
            .region(mycluster.region())
            .forceDelete(true)
            .placement(JobPlacementArgs.builder()
                .clusterName(mycluster.name())
                .build())
            .pysparkConfig(JobPysparkConfigArgs.builder()
                .mainPythonFileUri("gs://dataproc-examples-2f10d78d114f6aaec76462e3c310f31f/src/pyspark/hello-world/hello-world.py")
                .properties(Map.of("spark.logConf", "true"))
                .build())
            .build());

        ctx.export("sparkStatus", spark.statuses().applyValue(statuses -> statuses.get(0).state()));
        ctx.export("pysparkStatus", pyspark.statuses().applyValue(statuses -> statuses.get(0).state()));
    }
}
import pulumi
import pulumi_gcp as gcp

mycluster = gcp.dataproc.Cluster("mycluster", region="us-central1")
# Submit an example spark job to a dataproc cluster
spark = gcp.dataproc.Job("spark",
    region=mycluster.region,
    force_delete=True,
    placement=gcp.dataproc.JobPlacementArgs(
        cluster_name=mycluster.name,
    ),
    spark_config=gcp.dataproc.JobSparkConfigArgs(
        main_class="org.apache.spark.examples.SparkPi",
        jar_file_uris=["file:///usr/lib/spark/examples/jars/spark-examples.jar"],
        args=["1000"],
        properties={
            "spark.logConf": "true",
        },
        logging_config=gcp.dataproc.JobSparkConfigLoggingConfigArgs(
            driver_log_levels={
                "root": "INFO",
            },
        ),
    ))
# Submit an example pyspark job to a dataproc cluster
pyspark = gcp.dataproc.Job("pyspark",
    region=mycluster.region,
    force_delete=True,
    placement=gcp.dataproc.JobPlacementArgs(
        cluster_name=mycluster.name,
    ),
    pyspark_config=gcp.dataproc.JobPysparkConfigArgs(
        main_python_file_uri="gs://dataproc-examples-2f10d78d114f6aaec76462e3c310f31f/src/pyspark/hello-world/hello-world.py",
        properties={
            "spark.logConf": "true",
        },
    ))
pulumi.export("sparkStatus", spark.statuses[0].state)
pulumi.export("pysparkStatus", pyspark.statuses[0].state)
import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";

const mycluster = new gcp.dataproc.Cluster("mycluster", {region: "us-central1"});
// Submit an example spark job to a dataproc cluster
const spark = new gcp.dataproc.Job("spark", {
    region: mycluster.region,
    forceDelete: true,
    placement: {
        clusterName: mycluster.name,
    },
    sparkConfig: {
        mainClass: "org.apache.spark.examples.SparkPi",
        jarFileUris: ["file:///usr/lib/spark/examples/jars/spark-examples.jar"],
        args: ["1000"],
        properties: {
            "spark.logConf": "true",
        },
        loggingConfig: {
            driverLogLevels: {
                root: "INFO",
            },
        },
    },
});
// Submit an example pyspark job to a dataproc cluster
const pyspark = new gcp.dataproc.Job("pyspark", {
    region: mycluster.region,
    forceDelete: true,
    placement: {
        clusterName: mycluster.name,
    },
    pysparkConfig: {
        mainPythonFileUri: "gs://dataproc-examples-2f10d78d114f6aaec76462e3c310f31f/src/pyspark/hello-world/hello-world.py",
        properties: {
            "spark.logConf": "true",
        },
    },
});
export const sparkStatus = spark.statuses.apply(statuses => statuses[0].state);
export const pysparkStatus = pyspark.statuses.apply(statuses => statuses[0].state);
resources:
  mycluster:
    type: gcp:dataproc:Cluster
    properties:
      region: us-central1
  spark:
    type: gcp:dataproc:Job
    properties:
      region: ${mycluster.region}
      forceDelete: true
      placement:
        clusterName: ${mycluster.name}
      sparkConfig:
        mainClass: org.apache.spark.examples.SparkPi
        jarFileUris:
          - file:///usr/lib/spark/examples/jars/spark-examples.jar
        args:
          - "1000"
        properties:
          spark.logConf: "true"
        loggingConfig:
          driverLogLevels:
            root: INFO
  pyspark:
    type: gcp:dataproc:Job
    properties:
      region: ${mycluster.region}
      forceDelete: true
      placement:
        clusterName: ${mycluster.name}
      pysparkConfig:
        mainPythonFileUri: gs://dataproc-examples-2f10d78d114f6aaec76462e3c310f31f/src/pyspark/hello-world/hello-world.py
        properties:
          spark.logConf: "true"
outputs:
  sparkStatus: ${spark.statuses[0].state}
  pysparkStatus: ${pyspark.statuses[0].state}

Create a Job Resource

new Job(name: string, args: JobArgs, opts?: CustomResourceOptions);
@overload
def Job(resource_name: str,
        opts: Optional[ResourceOptions] = None,
        force_delete: Optional[bool] = None,
        hadoop_config: Optional[JobHadoopConfigArgs] = None,
        hive_config: Optional[JobHiveConfigArgs] = None,
        labels: Optional[Mapping[str, str]] = None,
        pig_config: Optional[JobPigConfigArgs] = None,
        placement: Optional[JobPlacementArgs] = None,
        presto_config: Optional[JobPrestoConfigArgs] = None,
        project: Optional[str] = None,
        pyspark_config: Optional[JobPysparkConfigArgs] = None,
        reference: Optional[JobReferenceArgs] = None,
        region: Optional[str] = None,
        scheduling: Optional[JobSchedulingArgs] = None,
        spark_config: Optional[JobSparkConfigArgs] = None,
        sparksql_config: Optional[JobSparksqlConfigArgs] = None)
@overload
def Job(resource_name: str,
        args: JobArgs,
        opts: Optional[ResourceOptions] = None)
func NewJob(ctx *Context, name string, args JobArgs, opts ...ResourceOption) (*Job, error)
public Job(string name, JobArgs args, CustomResourceOptions? opts = null)
public Job(String name, JobArgs args)
public Job(String name, JobArgs args, CustomResourceOptions options)
type: gcp:dataproc:Job
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.

name string
The unique name of the resource.
args JobArgs
The arguments to resource properties.
opts CustomResourceOptions
Bag of options to control resource's behavior.
resource_name str
The unique name of the resource.
args JobArgs
The arguments to resource properties.
opts ResourceOptions
Bag of options to control resource's behavior.
ctx Context
Context object for the current deployment.
name string
The unique name of the resource.
args JobArgs
The arguments to resource properties.
opts ResourceOption
Bag of options to control resource's behavior.
name string
The unique name of the resource.
args JobArgs
The arguments to resource properties.
opts CustomResourceOptions
Bag of options to control resource's behavior.
name String
The unique name of the resource.
args JobArgs
The arguments to resource properties.
options CustomResourceOptions
Bag of options to control resource's behavior.

Job Resource Properties

To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

Inputs

The Job resource accepts the following input properties:

Placement JobPlacementArgs

The config of job placement.

ForceDelete bool

By default, you can only delete inactive jobs within Dataproc. Setting this to true, and calling destroy, will ensure that the job is first cancelled before issuing the delete.

HadoopConfig JobHadoopConfigArgs

The config of the Hadoop job.

HiveConfig JobHiveConfigArgs

The config of the Hive job.

Labels Dictionary<string, string>

The list of labels (key/value pairs) to add to the job.

PigConfig JobPigConfigArgs

The config of the Pig job.

PrestoConfig JobPrestoConfigArgs

The config of the Presto job.

Project string

The project in which the cluster can be found and jobs subsequently run against. If it is not provided, the provider project is used.

PysparkConfig JobPysparkConfigArgs

The config of the PySpark job.

Reference JobReferenceArgs

The reference of the job.

Region string

The Cloud Dataproc region. This essentially determines which clusters are available for this job to be submitted to. If not specified, defaults to global.

Scheduling JobSchedulingArgs

Optional. Job scheduling configuration.

SparkConfig JobSparkConfigArgs

The config of the Spark job.

SparksqlConfig JobSparksqlConfigArgs

The config of the Spark SQL job.

Placement JobPlacementArgs

The config of job placement.

ForceDelete bool

By default, you can only delete inactive jobs within Dataproc. Setting this to true, and calling destroy, will ensure that the job is first cancelled before issuing the delete.

HadoopConfig JobHadoopConfigArgs

The config of the Hadoop job.

HiveConfig JobHiveConfigArgs

The config of the Hive job.

Labels map[string]string

The list of labels (key/value pairs) to add to the job.

PigConfig JobPigConfigArgs

The config of the Pig job.

PrestoConfig JobPrestoConfigArgs

The config of the Presto job.

Project string

The project in which the cluster can be found and jobs subsequently run against. If it is not provided, the provider project is used.

PysparkConfig JobPysparkConfigArgs

The config of the PySpark job.

Reference JobReferenceArgs

The reference of the job.

Region string

The Cloud Dataproc region. This essentially determines which clusters are available for this job to be submitted to. If not specified, defaults to global.

Scheduling JobSchedulingArgs

Optional. Job scheduling configuration.

SparkConfig JobSparkConfigArgs

The config of the Spark job.

SparksqlConfig JobSparksqlConfigArgs

The config of the Spark SQL job.

placement JobPlacementArgs

The config of job placement.

forceDelete Boolean

By default, you can only delete inactive jobs within Dataproc. Setting this to true, and calling destroy, will ensure that the job is first cancelled before issuing the delete.

hadoopConfig JobHadoopConfigArgs

The config of the Hadoop job.

hiveConfig JobHiveConfigArgs

The config of the Hive job.

labels Map<String,String>

The list of labels (key/value pairs) to add to the job.

pigConfig JobPigConfigArgs

The config of the Pig job.

prestoConfig JobPrestoConfigArgs

The config of the Presto job.

project String

The project in which the cluster can be found and jobs subsequently run against. If it is not provided, the provider project is used.

pysparkConfig JobPysparkConfigArgs

The config of the PySpark job.

reference JobReferenceArgs

The reference of the job.

region String

The Cloud Dataproc region. This essentially determines which clusters are available for this job to be submitted to. If not specified, defaults to global.

scheduling JobSchedulingArgs

Optional. Job scheduling configuration.

sparkConfig JobSparkConfigArgs

The config of the Spark job.

sparksqlConfig JobSparksqlConfigArgs

The config of the Spark SQL job.

placement JobPlacementArgs

The config of job placement.

forceDelete boolean

By default, you can only delete inactive jobs within Dataproc. Setting this to true, and calling destroy, will ensure that the job is first cancelled before issuing the delete.

hadoopConfig JobHadoopConfigArgs

The config of the Hadoop job.

hiveConfig JobHiveConfigArgs

The config of the Hive job.

labels {[key: string]: string}

The list of labels (key/value pairs) to add to the job.

pigConfig JobPigConfigArgs

The config of the Pig job.

prestoConfig JobPrestoConfigArgs

The config of the Presto job.

project string

The project in which the cluster can be found and jobs subsequently run against. If it is not provided, the provider project is used.

pysparkConfig JobPysparkConfigArgs

The config of the PySpark job.

reference JobReferenceArgs

The reference of the job.

region string

The Cloud Dataproc region. This essentially determines which clusters are available for this job to be submitted to. If not specified, defaults to global.

scheduling JobSchedulingArgs

Optional. Job scheduling configuration.

sparkConfig JobSparkConfigArgs

The config of the Spark job.

sparksqlConfig JobSparksqlConfigArgs

The config of the Spark SQL job.

placement JobPlacementArgs

The config of job placement.

force_delete bool

By default, you can only delete inactive jobs within Dataproc. Setting this to true, and calling destroy, will ensure that the job is first cancelled before issuing the delete.

hadoop_config JobHadoopConfigArgs

The config of the Hadoop job.

hive_config JobHiveConfigArgs

The config of the Hive job.

labels Mapping[str, str]

The list of labels (key/value pairs) to add to the job.

pig_config JobPigConfigArgs

The config of the Pig job.

presto_config JobPrestoConfigArgs

The config of the Presto job.

project str

The project in which the cluster can be found and jobs subsequently run against. If it is not provided, the provider project is used.

pyspark_config JobPysparkConfigArgs

The config of the PySpark job.

reference JobReferenceArgs

The reference of the job.

region str

The Cloud Dataproc region. This essentially determines which clusters are available for this job to be submitted to. If not specified, defaults to global.

scheduling JobSchedulingArgs

Optional. Job scheduling configuration.

spark_config JobSparkConfigArgs

The config of the Spark job.

sparksql_config JobSparksqlConfigArgs

The config of the Spark SQL job.

placement Property Map

The config of job placement.

forceDelete Boolean

By default, you can only delete inactive jobs within Dataproc. Setting this to true, and calling destroy, will ensure that the job is first cancelled before issuing the delete.

hadoopConfig Property Map

The config of the Hadoop job.

hiveConfig Property Map

The config of the Hive job.

labels Map<String>

The list of labels (key/value pairs) to add to the job.

pigConfig Property Map

The config of the Pig job.

prestoConfig Property Map

The config of the Presto job.

project String

The project in which the cluster can be found and jobs subsequently run against. If it is not provided, the provider project is used.

pysparkConfig Property Map

The config of the PySpark job.

reference Property Map

The reference of the job.

region String

The Cloud Dataproc region. This essentially determines which clusters are available for this job to be submitted to. If not specified, defaults to global.

scheduling Property Map

Optional. Job scheduling configuration.

sparkConfig Property Map

The config of the Spark job.

sparksqlConfig Property Map

The config of the Spark SQL job.
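
The optional inputs above compose in a single resource. Below is a minimal TypeScript sketch (imports as in the Example Usage section) combining labels, reference, and scheduling with a Spark config. The reference and scheduling field names (jobId, maxFailuresPerHour, maxFailuresTotal) mirror the underlying Dataproc API and should be verified against your SDK version; the cluster name is a placeholder:

// Sketch only: exercises several optional inputs documented above.
const labeled = new gcp.dataproc.Job("labeled", {
    region: "us-central1",
    placement: {
        clusterName: "mycluster", // placeholder: an existing Dataproc cluster
    },
    labels: {
        env: "dev",
    },
    reference: {
        jobId: "my-spark-job-0001", // user-specified job ID (assumed field name)
    },
    scheduling: {
        maxFailuresPerHour: 1, // assumed field names; driver restart policy
        maxFailuresTotal: 10,
    },
    sparkConfig: {
        mainClass: "org.apache.spark.examples.SparkPi",
        jarFileUris: ["file:///usr/lib/spark/examples/jars/spark-examples.jar"],
    },
});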

Outputs

All input properties are implicitly available as output properties. Additionally, the Job resource produces the following output properties:

DriverControlsFilesUri string

If present, the location of miscellaneous control files which may be used as part of job setup and handling. If not present, control files may be placed in the same location as driver_output_uri.

DriverOutputResourceUri string

A URI pointing to the location of the stdout of the job's driver program.

Id string

The provider-assigned unique ID for this managed resource.

Statuses List<JobStatus>

The status of the job.

DriverControlsFilesUri string

If present, the location of miscellaneous control files which may be used as part of job setup and handling. If not present, control files may be placed in the same location as driver_output_uri.

DriverOutputResourceUri string

A URI pointing to the location of the stdout of the job's driver program.

Id string

The provider-assigned unique ID for this managed resource.

Statuses []JobStatus

The status of the job.

driverControlsFilesUri String

If present, the location of miscellaneous control files which may be used as part of job setup and handling. If not present, control files may be placed in the same location as driver_output_uri.

driverOutputResourceUri String

A URI pointing to the location of the stdout of the job's driver program.

id String

The provider-assigned unique ID for this managed resource.

statuses List<JobStatus>

The status of the job.

driverControlsFilesUri string

If present, the location of miscellaneous control files which may be used as part of job setup and handling. If not present, control files may be placed in the same location as driver_output_uri.

driverOutputResourceUri string

A URI pointing to the location of the stdout of the job's driver program.

id string

The provider-assigned unique ID for this managed resource.

statuses JobStatus[]

The status of the job.

driver_controls_files_uri str

If present, the location of miscellaneous control files which may be used as part of job setup and handling. If not present, control files may be placed in the same location as driver_output_uri.

driver_output_resource_uri str

A URI pointing to the location of the stdout of the job's driver program.

id str

The provider-assigned unique ID for this managed resource.

statuses Sequence[JobStatus]

The status of the job.

driverControlsFilesUri String

If present, the location of miscellaneous control files which may be used as part of job setup and handling. If not present, control files may be placed in the same location as driver_output_uri.

driverOutputResourceUri String

A URI pointing to the location of the stdout of the job's driver program.

id String

The provider-assigned unique ID for this managed resource.

statuses List<Property Map>

The status of the job.
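
The driver output URI pairs naturally with the status exports shown in Example Usage. A minimal TypeScript sketch, assuming the spark job defined there:

// Export the Cloud Storage location of the driver's stdout for the `spark` job.
export const sparkDriverOutput = spark.driverOutputResourceUri;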

Look up an Existing Job Resource

Get an existing Job resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

public static get(name: string, id: Input<ID>, state?: JobState, opts?: CustomResourceOptions): Job
@staticmethod
def get(resource_name: str,
        id: str,
        opts: Optional[ResourceOptions] = None,
        driver_controls_files_uri: Optional[str] = None,
        driver_output_resource_uri: Optional[str] = None,
        force_delete: Optional[bool] = None,
        hadoop_config: Optional[JobHadoopConfigArgs] = None,
        hive_config: Optional[JobHiveConfigArgs] = None,
        labels: Optional[Mapping[str, str]] = None,
        pig_config: Optional[JobPigConfigArgs] = None,
        placement: Optional[JobPlacementArgs] = None,
        presto_config: Optional[JobPrestoConfigArgs] = None,
        project: Optional[str] = None,
        pyspark_config: Optional[JobPysparkConfigArgs] = None,
        reference: Optional[JobReferenceArgs] = None,
        region: Optional[str] = None,
        scheduling: Optional[JobSchedulingArgs] = None,
        spark_config: Optional[JobSparkConfigArgs] = None,
        sparksql_config: Optional[JobSparksqlConfigArgs] = None,
        statuses: Optional[Sequence[JobStatusArgs]] = None) -> Job
func GetJob(ctx *Context, name string, id IDInput, state *JobState, opts ...ResourceOption) (*Job, error)
public static Job Get(string name, Input<string> id, JobState? state, CustomResourceOptions? opts = null)
public static Job get(String name, Output<String> id, JobState state, CustomResourceOptions options)
Resource lookup is not supported in YAML
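
A minimal TypeScript sketch of a lookup; the ID below is a placeholder for the provider-assigned ID of a job that already exists:

import * as gcp from "@pulumi/gcp";

// Adopt an existing Dataproc job into the program without recreating it.
const existing = gcp.dataproc.Job.get("existing", "existing-job-id" /* placeholder */);
export const existingState = existing.statuses.apply(statuses => statuses[0].state);
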
name
The unique name of the resulting resource.
id
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
resource_name
The unique name of the resulting resource.
id
The unique provider ID of the resource to lookup.
name
The unique name of the resulting resource.
id
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
name
The unique name of the resulting resource.
id
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
name
The unique name of the resulting resource.
id
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
The following state arguments are supported:
DriverControlsFilesUri string

If present, the location of miscellaneous control files which may be used as part of job setup and handling. If not present, control files may be placed in the same location as driver_output_uri.

DriverOutputResourceUri string

A URI pointing to the location of the stdout of the job's driver program.

ForceDelete bool

By default, you can only delete inactive jobs within Dataproc. Setting this to true, and calling destroy, will ensure that the job is first cancelled before issuing the delete.

HadoopConfig JobHadoopConfigArgs

The config of the Hadoop job.

HiveConfig JobHiveConfigArgs

The config of the Hive job.

Labels Dictionary<string, string>

The list of labels (key/value pairs) to add to the job.

PigConfig JobPigConfigArgs

The config of the Pig job.

Placement JobPlacementArgs

The config of job placement.

PrestoConfig JobPrestoConfigArgs

The config of the Presto job.

Project string

The project in which the cluster can be found and jobs subsequently run against. If it is not provided, the provider project is used.

PysparkConfig JobPysparkConfigArgs

The config of the PySpark job.

Reference JobReferenceArgs

The reference of the job.

Region string

The Cloud Dataproc region. This essentially determines which clusters are available for this job to be submitted to. If not specified, defaults to global.

Scheduling JobSchedulingArgs

Optional. Job scheduling configuration.

SparkConfig JobSparkConfigArgs

The config of the Spark job.

SparksqlConfig JobSparksqlConfigArgs

The config of the Spark SQL job.

Statuses List<JobStatusArgs>

The status of the job.

DriverControlsFilesUri string

If present, the location of miscellaneous control files which may be used as part of job setup and handling. If not present, control files may be placed in the same location as driver_output_uri.

DriverOutputResourceUri string

A URI pointing to the location of the stdout of the job's driver program.

ForceDelete bool

By default, you can only delete inactive jobs within Dataproc. Setting this to true, and calling destroy, will ensure that the job is first cancelled before issuing the delete.

HadoopConfig JobHadoopConfigArgs

The config of the Hadoop job.

HiveConfig JobHiveConfigArgs

The config of the Hive job.

Labels map[string]string

The list of labels (key/value pairs) to add to the job.

PigConfig JobPigConfigArgs

The config of the Pig job.

Placement JobPlacementArgs

The config of job placement.

PrestoConfig JobPrestoConfigArgs

The config of the Presto job.

Project string

The project in which the cluster can be found and jobs subsequently run against. If it is not provided, the provider project is used.

PysparkConfig JobPysparkConfigArgs

The config of the PySpark job.

Reference JobReferenceArgs

The reference of the job.

Region string

The Cloud Dataproc region. This essentially determines which clusters are available for this job to be submitted to. If not specified, defaults to global.

Scheduling JobSchedulingArgs

Optional. Job scheduling configuration.

SparkConfig JobSparkConfigArgs

The config of the Spark job.

SparksqlConfig JobSparksqlConfigArgs

The config of the Spark SQL job.

Statuses []JobStatusArgs

The status of the job.

driverControlsFilesUri String

If present, the location of miscellaneous control files which may be used as part of job setup and handling. If not present, control files may be placed in the same location as driver_output_uri.

driverOutputResourceUri String

A URI pointing to the location of the stdout of the job's driver program.

forceDelete Boolean

By default, you can only delete inactive jobs within Dataproc. Setting this to true, and calling destroy, will ensure that the job is first cancelled before issuing the delete.

hadoopConfig JobHadoopConfigArgs

The config of the Hadoop job.

hiveConfig JobHiveConfigArgs

The config of the Hive job.

labels Map<String,String>

The list of labels (key/value pairs) to add to the job.

pigConfig JobPigConfigArgs

The config of the Pig job.

placement JobPlacementArgs

The config of job placement.

prestoConfig JobPrestoConfigArgs

The config of the Presto job.

project String

The project in which the cluster can be found and jobs subsequently run against. If it is not provided, the provider project is used.

pysparkConfig JobPysparkConfigArgs

The config of the PySpark job.

reference JobReferenceArgs

The reference of the job.

region String

The Cloud Dataproc region. This essentially determines which clusters are available for this job to be submitted to. If not specified, defaults to global.

scheduling JobSchedulingArgs

Optional. Job scheduling configuration.

sparkConfig JobSparkConfigArgs

The config of the Spark job.

sparksqlConfig JobSparksqlConfigArgs

The config of the Spark SQL job.

statuses List<JobStatusArgs>

The status of the job.

driverControlsFilesUri string

If present, the location of miscellaneous control files which may be used as part of job setup and handling. If not present, control files may be placed in the same location as driver_output_uri.

driverOutputResourceUri string

A URI pointing to the location of the stdout of the job's driver program.

forceDelete boolean

By default, you can only delete inactive jobs within Dataproc. Setting this to true, and calling destroy, will ensure that the job is first cancelled before issuing the delete.

hadoopConfig JobHadoopConfigArgs

The config of the Hadoop job.

hiveConfig JobHiveConfigArgs

The config of the Hive job.

labels {[key: string]: string}

The list of labels (key/value pairs) to add to the job.

pigConfig JobPigConfigArgs

The config of the Pig job.

placement JobPlacementArgs

The config of job placement.

prestoConfig JobPrestoConfigArgs

The config of the Presto job.

project string

The project in which the cluster can be found and jobs subsequently run against. If it is not provided, the provider project is used.

pysparkConfig JobPysparkConfigArgs

The config of the PySpark job.

reference JobReferenceArgs

The reference of the job.

region string

The Cloud Dataproc region. This essentially determines which clusters are available for this job to be submitted to. If not specified, defaults to global.

scheduling JobSchedulingArgs

Optional. Job scheduling configuration.

sparkConfig JobSparkConfigArgs

The config of the Spark job.

sparksqlConfig JobSparksqlConfigArgs

The config of the Spark SQL job.

statuses JobStatusArgs[]

The status of the job.

driver_controls_files_uri str

If present, the location of miscellaneous control files which may be used as part of job setup and handling. If not present, control files may be placed in the same location as driver_output_uri.

driver_output_resource_uri str

A URI pointing to the location of the stdout of the job's driver program.

force_delete bool

By default, you can only delete inactive jobs within Dataproc. Setting this to true, and calling destroy, will ensure that the job is first cancelled before issuing the delete.

hadoop_config JobHadoopConfigArgs

The config of the Hadoop job.

hive_config JobHiveConfigArgs

The config of the Hive job.

labels Mapping[str, str]

The list of labels (key/value pairs) to add to the job.

pig_config JobPigConfigArgs

The config of the Pig job.

placement JobPlacementArgs

The config of job placement.

presto_config JobPrestoConfigArgs

The config of the Presto job.

project str

The project in which the cluster can be found and jobs subsequently run against. If it is not provided, the provider project is used.

pyspark_config JobPysparkConfigArgs

The config of the PySpark job.

reference JobReferenceArgs

The reference of the job.

region str

The Cloud Dataproc region. This essentially determines which clusters are available for this job to be submitted to. If not specified, defaults to global.

scheduling JobSchedulingArgs

Optional. Job scheduling configuration.

spark_config JobSparkConfigArgs

The config of the Spark job.

sparksql_config JobSparksqlConfigArgs

The config of the Spark SQL job.

statuses Sequence[JobStatusArgs]

The status of the job.

driverControlsFilesUri String

If present, the location of miscellaneous control files which may be used as part of job setup and handling. If not present, control files may be placed in the same location as driver_output_uri.

driverOutputResourceUri String

A URI pointing to the location of the stdout of the job's driver program.

forceDelete Boolean

By default, you can only delete inactive jobs within Dataproc. Setting this to true, and calling destroy, will ensure that the job is first cancelled before issuing the delete.

hadoopConfig Property Map

The config of the Hadoop job.

hiveConfig Property Map

The config of the Hive job.

labels Map<String>

The list of labels (key/value pairs) to add to the job.

pigConfig Property Map

The config of the Pig job.

placement Property Map

The config of job placement.

prestoConfig Property Map

The config of the Presto job.

project String

The project in which the cluster can be found and jobs subsequently run against. If it is not provided, the provider project is used.

pysparkConfig Property Map

The config of the PySpark job.

reference Property Map

The reference of the job.

region String

The Cloud Dataproc region. This essentially determines which clusters are available for this job to be submitted to. If not specified, defaults to global.

scheduling Property Map

Optional. Job scheduling configuration.

sparkConfig Property Map

The config of the Spark job.

sparksqlConfig Property Map

The config of the Spark SQL job.

statuses List<Property Map>

The status of the job.

Supporting Types

JobHadoopConfig

ArchiveUris List<string>

HCFS URIs of archives to be extracted in the working directory. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.

Args List<string>

The arguments to pass to the driver. Do not include arguments, such as -libjars or -Dfoo=bar, that can be set as job properties, since a collision may occur that causes an incorrect job submission.

FileUris List<string>

HCFS URIs of files to be copied to the working directory of Hadoop drivers and distributed tasks. Useful for naively parallel tasks.

JarFileUris List<string>

HCFS URIs of jar files to be added to the CLASSPATH of the Hadoop driver and tasks.

LoggingConfig JobHadoopConfigLoggingConfig
MainClass string

The name of the driver's main class. The jar file containing the class must be in the default CLASSPATH or specified in jar_file_uris. Conflicts with main_jar_file_uri

MainJarFileUri string

The HCFS URI of the jar file containing the main class. Examples: 'gs://foo-bucket/analytics-binaries/extract-useful-metrics-mr.jar' 'hdfs:/tmp/test-samples/custom-wordcount.jar' 'file:///home/usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar'. Conflicts with main_class

Properties Dictionary<string, string>

A mapping of property names to values, used to configure Hadoop. Properties that conflict with values set by the Cloud Dataproc API may be overwritten.

ArchiveUris []string

HCFS URIs of archives to be extracted in the working directory. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.

Args []string

The arguments to pass to the driver. Do not include arguments, such as -libjars or -Dfoo=bar, that can be set as job properties, since a collision may occur that causes an incorrect job submission.

FileUris []string

HCFS URIs of files to be copied to the working directory of Hadoop drivers and distributed tasks. Useful for naively parallel tasks.

JarFileUris []string

HCFS URIs of jar files to be added to the CLASSPATH of the Hadoop driver and tasks.

LoggingConfig JobHadoopConfigLoggingConfig
MainClass string

The name of the driver's main class. The jar file containing the class must be in the default CLASSPATH or specified in jar_file_uris. Conflicts with main_jar_file_uri

MainJarFileUri string

The HCFS URI of the jar file containing the main class. Examples: 'gs://foo-bucket/analytics-binaries/extract-useful-metrics-mr.jar' 'hdfs:/tmp/test-samples/custom-wordcount.jar' 'file:///home/usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar'. Conflicts with main_class

Properties map[string]string

A mapping of property names to values, used to configure Hadoop. Properties that conflict with values set by the Cloud Dataproc API may be overwritten.

archiveUris List<String>

HCFS URIs of archives to be extracted in the working directory. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.

args List<String>

The arguments to pass to the driver. Do not include arguments, such as -libjars or -Dfoo=bar, that can be set as job properties, since a collision may occur that causes an incorrect job submission.

fileUris List<String>

HCFS URIs of files to be copied to the working directory of Hadoop drivers and distributed tasks. Useful for naively parallel tasks.

jarFileUris List<String>

HCFS URIs of jar files to be added to the CLASSPATH of the Hadoop driver and tasks.

loggingConfig JobHadoopConfigLoggingConfig
mainClass String

The name of the driver's main class. The jar file containing the class must be in the default CLASSPATH or specified in jar_file_uris. Conflicts with main_jar_file_uri

mainJarFileUri String

The HCFS URI of the jar file containing the main class. Examples: 'gs://foo-bucket/analytics-binaries/extract-useful-metrics-mr.jar' 'hdfs:/tmp/test-samples/custom-wordcount.jar' 'file:///home/usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar'. Conflicts with main_class

properties Map<String,String>

A mapping of property names to values, used to configure Hadoop. Properties that conflict with values set by the Cloud Dataproc API may be overwritten.

archiveUris string[]

HCFS URIs of archives to be extracted in the working directory. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.

args string[]

The arguments to pass to the driver. Do not include arguments, such as -libjars or -Dfoo=bar, that can be set as job properties, since a collision may occur that causes an incorrect job submission.

fileUris string[]

HCFS URIs of files to be copied to the working directory of Hadoop drivers and distributed tasks. Useful for naively parallel tasks.

jarFileUris string[]

HCFS URIs of jar files to be added to the CLASSPATH of the Hadoop driver and tasks.

loggingConfig JobHadoopConfigLoggingConfig
mainClass string

The name of the driver's main class. The jar file containing the class must be in the default CLASSPATH or specified in jar_file_uris. Conflicts with main_jar_file_uri

mainJarFileUri string

The HCFS URI of the jar file containing the main class. Examples: 'gs://foo-bucket/analytics-binaries/extract-useful-metrics-mr.jar' 'hdfs:/tmp/test-samples/custom-wordcount.jar' 'file:///home/usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar'. Conflicts with main_class

properties {[key: string]: string}

A mapping of property names to values, used to configure Hadoop. Properties that conflict with values set by the Cloud Dataproc API may be overwritten.

archive_uris Sequence[str]

HCFS URIs of archives to be extracted in the working directory. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.

args Sequence[str]

The arguments to pass to the driver. Do not include arguments, such as -libjars or -Dfoo=bar, that can be set as job properties, since a collision may occur that causes an incorrect job submission.

file_uris Sequence[str]

HCFS URIs of files to be copied to the working directory of Hadoop drivers and distributed tasks. Useful for naively parallel tasks.

jar_file_uris Sequence[str]

HCFS URIs of jar files to be added to the CLASSPATH of the Hadoop driver and tasks.

logging_config JobHadoopConfigLoggingConfig
main_class str

The name of the driver's main class. The jar file containing the class must be in the default CLASSPATH or specified in jar_file_uris. Conflicts with main_jar_file_uri

main_jar_file_uri str

The HCFS URI of the jar file containing the main class. Examples: 'gs://foo-bucket/analytics-binaries/extract-useful-metrics-mr.jar' 'hdfs:/tmp/test-samples/custom-wordcount.jar' 'file:///home/usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar'. Conflicts with main_class

properties Mapping[str, str]

A mapping of property names to values, used to configure Hadoop. Properties that conflict with values set by the Cloud Dataproc API may be overwritten.

archiveUris List<String>

HCFS URIs of archives to be extracted in the working directory. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.

args List<String>

The arguments to pass to the driver. Do not include arguments, such as -libjars or -Dfoo=bar, that can be set as job properties, since a collision may occur that causes an incorrect job submission.

fileUris List<String>

HCFS URIs of files to be copied to the working directory of Hadoop drivers and distributed tasks. Useful for naively parallel tasks.

jarFileUris List<String>

HCFS URIs of jar files to be added to the CLASSPATH of the Hadoop driver and tasks.

loggingConfig Property Map
mainClass String

The name of the driver's main class. The jar file containing the class must be in the default CLASSPATH or specified in jar_file_uris. Conflicts with main_jar_file_uri

mainJarFileUri String

The HCFS URI of the jar file containing the main class. Examples: 'gs://foo-bucket/analytics-binaries/extract-useful-metrics-mr.jar' 'hdfs:/tmp/test-samples/custom-wordcount.jar' 'file:///home/usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar'. Conflicts with main_class

properties Map<String>

A mapping of property names to values, used to configure Hadoop. Properties that conflict with values set by the Cloud Dataproc API may be overwritten.
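
A minimal TypeScript sketch of a Hadoop job wired through hadoopConfig (imports as in the Example Usage section). The jar is the stock MapReduce examples jar shipped on Dataproc images; the cluster name and output bucket are placeholders:

const hadoop = new gcp.dataproc.Job("hadoop", {
    region: "us-central1",
    placement: {
        clusterName: "mycluster", // placeholder: an existing Dataproc cluster
    },
    hadoopConfig: {
        mainJarFileUri: "file:///usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar",
        args: [
            "wordcount",
            "file:///usr/lib/spark/NOTICE",
            "gs://my-bucket/hadoopjob_output", // placeholder output location
        ],
    },
});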

JobHadoopConfigLoggingConfig

DriverLogLevels Dictionary<string, string>
DriverLogLevels map[string]string
driverLogLevels Map<String,String>
driverLogLevels {[key: string]: string}
driver_log_levels Mapping[str, str]
driverLogLevels Map<String>

JobHiveConfig

ContinueOnFailure bool

Whether to continue executing queries if a query fails. Setting to true can be useful when executing independent parallel queries. Defaults to false.

JarFileUris List<string>

HCFS URIs of jar files to add to the CLASSPATH of the Hive server and Hadoop MapReduce (MR) tasks.

Properties Dictionary<string, string>

A mapping of property names and values, used to configure Hive. Properties that conflict with values set by the Cloud Dataproc API may be overwritten.

QueryFileUri string

The HCFS URI of the script that contains SQL queries. Conflicts with query_list.

QueryLists List<string>

The list of SQL queries or statements to execute as part of the job. Conflicts with query_file_uri.

ScriptVariables Dictionary<string, string>

Mapping of query variable names to values (equivalent to the Hive command: SET name="value";).

ContinueOnFailure bool

Whether to continue executing queries if a query fails. Setting to true can be useful when executing independent parallel queries. Defaults to false.

JarFileUris []string

HCFS URIs of jar files to add to the CLASSPATH of the Hive server and Hadoop MapReduce (MR) tasks.

Properties map[string]string

A mapping of property names and values, used to configure Hive. Properties that conflict with values set by the Cloud Dataproc API may be overwritten.

QueryFileUri string

The HCFS URI of the script that contains SQL queries. Conflicts with query_list.

QueryLists []string

The list of SQL queries or statements to execute as part of the job. Conflicts with query_file_uri.

ScriptVariables map[string]string

Mapping of query variable names to values (equivalent to the Hive command: SET name="value";).

continueOnFailure Boolean

Whether to continue executing queries if a query fails. Setting to true can be useful when executing independent parallel queries. Defaults to false.

jarFileUris List<String>

HCFS URIs of jar files to add to the CLASSPATH of the Hive server and Hadoop MapReduce (MR) tasks.

properties Map<String,String>

A mapping of property names and values, used to configure Hive. Properties that conflict with values set by the Cloud Dataproc API may be overwritten.

queryFileUri String

The HCFS URI of the script that contains SQL queries. Conflicts with query_list.

queryLists List<String>

The list of SQL queries or statements to execute as part of the job. Conflicts with query_file_uri.

scriptVariables Map<String,String>

Mapping of query variable names to values (equivalent to the Hive command: SET name="value";).

continueOnFailure boolean

Whether to continue executing queries if a query fails. Setting to true can be useful when executing independent parallel queries. Defaults to false.

jarFileUris string[]

HCFS URIs of jar files to add to the CLASSPATH of the Hive server and Hadoop MapReduce (MR) tasks.

properties {[key: string]: string}

A mapping of property names and values, used to configure Hive. Properties that conflict with values set by the Cloud Dataproc API may be overwritten.

queryFileUri string

The HCFS URI of the script that contains SQL queries. Conflicts with query_list.

queryLists string[]

The list of SQL queries or statements to execute as part of the job. Conflicts with query_file_uri.

scriptVariables {[key: string]: string}

Mapping of query variable names to values (equivalent to the Hive command: SET name="value";).

continue_on_failure bool

Whether to continue executing queries if a query fails. Setting to true can be useful when executing independent parallel queries. Defaults to false.

jar_file_uris Sequence[str]

HCFS URIs of jar files to add to the CLASSPATH of the Hive server and Hadoop MapReduce (MR) tasks.

properties Mapping[str, str]

A mapping of property names and values, used to configure Hive. Properties that conflict with values set by the Cloud Dataproc API may be overwritten.

query_file_uri str

The HCFS URI of the script that contains SQL queries. Conflicts with query_list.

query_lists Sequence[str]

The list of SQL queries or statements to execute as part of the job. Conflicts with query_file_uri.

script_variables Mapping[str, str]

Mapping of query variable names to values (equivalent to the Hive command: SET name="value";).

continueOnFailure Boolean

Whether to continue executing queries if a query fails. Setting to true can be useful when executing independent parallel queries. Defaults to false.

jarFileUris List<String>

HCFS URIs of jar files to add to the CLASSPATH of the Hive server and Hadoop MapReduce (MR) tasks.

properties Map<String>

A mapping of property names and values, used to configure Hive. Properties that conflict with values set by the Cloud Dataproc API may be overwritten.

queryFileUri String

The HCFS URI of the script that contains SQL queries. Conflicts with query_list.

queryLists List<String>

The list of SQL queries or statements to execute as part of the job. Conflicts with query_file_uri.

scriptVariables Map<String>

Mapping of query variable names to values (equivalent to the Hive command: SET name="value";).
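
A minimal TypeScript sketch of a Hive job via hiveConfig (imports as in the Example Usage section); the table and bucket names are placeholders:

const hive = new gcp.dataproc.Job("hive", {
    region: "us-central1",
    placement: {
        clusterName: "mycluster", // placeholder: an existing Dataproc cluster
    },
    hiveConfig: {
        queryLists: [
            "DROP TABLE IF EXISTS dprocjob_test",
            "CREATE EXTERNAL TABLE dprocjob_test(bar int) LOCATION 'gs://my-bucket/hive_dprocjob_test/'",
            "SELECT * FROM dprocjob_test WHERE bar > 2",
        ],
    },
});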

JobPigConfig

ContinueOnFailure bool

Whether to continue executing queries if a query fails. Setting to true can be useful when executing independent parallel queries. Defaults to false.

JarFileUris List<string>

HCFS URIs of jar files to add to the CLASSPATH of the Pig Client and Hadoop MapReduce (MR) tasks.

LoggingConfig JobPigConfigLoggingConfig
Properties Dictionary<string, string>

A mapping of property names to values, used to configure Pig. Properties that conflict with values set by the Cloud Dataproc API may be overwritten.

QueryFileUri string

The HCFS URI of the script that contains SQL queries. Conflicts with query_list.

QueryLists List<string>

The list of SQL queries or statements to execute as part of the job. Conflicts with query_file_uri.

ScriptVariables Dictionary<string, string>

Mapping of query variable names to values (equivalent to the Pig command: name=[value]).

ContinueOnFailure bool

Whether to continue executing queries if a query fails. Setting to true can be useful when executing independent parallel queries. Defaults to false.

JarFileUris []string

HCFS URIs of jar files to add to the CLASSPATH of the Pig Client and Hadoop MapReduce (MR) tasks.

LoggingConfig JobPigConfigLoggingConfig
Properties map[string]string

A mapping of property names to values, used to configure Pig. Properties that conflict with values set by the Cloud Dataproc API may be overwritten.

QueryFileUri string

The HCFS URI of the script that contains SQL queries. Conflicts with query_list.

QueryLists []string

The list of SQL queries or statements to execute as part of the job. Conflicts with query_file_uri.

ScriptVariables map[string]string

Mapping of query variable names to values (equivalent to the Pig command: name=[value]).

continueOnFailure Boolean

Whether to continue executing queries if a query fails. Setting to true can be useful when executing independent parallel queries. Defaults to false.

jarFileUris List<String>

HCFS URIs of jar files to add to the CLASSPATH of the Pig Client and Hadoop MapReduce (MR) tasks.

loggingConfig JobPigConfigLoggingConfig
properties Map<String,String>

A mapping of property names to values, used to configure Pig. Properties that conflict with values set by the Cloud Dataproc API may be overwritten.

queryFileUri String

The HCFS URI of the script that contains SQL queries. Conflicts with query_list.

queryLists List<String>

The list of SQL queries or statements to execute as part of the job. Conflicts with query_file_uri.

scriptVariables Map<String,String>

Mapping of query variable names to values (equivalent to the Pig command: name=[value]).

continueOnFailure boolean

Whether to continue executing queries if a query fails. Setting to true can be useful when executing independent parallel queries. Defaults to false.

jarFileUris string[]

HCFS URIs of jar files to add to the CLASSPATH of the Pig Client and Hadoop MapReduce (MR) tasks.

loggingConfig JobPigConfigLoggingConfig
properties {[key: string]: string}

A mapping of property names to values, used to configure Pig. Properties that conflict with values set by the Cloud Dataproc API may be overwritten.

queryFileUri string

The HCFS URI of the script that contains SQL queries. Conflicts with query_list.

queryLists string[]

The list of SQL queries or statements to execute as part of the job. Conflicts with query_file_uri.

scriptVariables {[key: string]: string}

Mapping of query variable names to values (equivalent to the Pig command: name=[value]).

continue_on_failure bool

Whether to continue executing queries if a query fails. Setting to true can be useful when executing independent parallel queries. Defaults to false.

jar_file_uris Sequence[str]

HCFS URIs of jar files to add to the CLASSPATH of the Pig Client and Hadoop MapReduce (MR) tasks.

logging_config JobPigConfigLoggingConfig
properties Mapping[str, str]

A mapping of property names to values, used to configure Pig. Properties that conflict with values set by the Cloud Dataproc API may be overwritten.

query_file_uri str

The HCFS URI of the script that contains SQL queries. Conflicts with query_list.

query_lists Sequence[str]

The list of SQL queries or statements to execute as part of the job. Conflicts with query_file_uri.

script_variables Mapping[str, str]

Mapping of query variable names to values (equivalent to the Pig command: name=[value]).

continueOnFailure Boolean

Whether to continue executing queries if a query fails. Setting to true can be useful when executing independent parallel queries. Defaults to false.

jarFileUris List<String>

HCFS URIs of jar files to add to the CLASSPATH of the Pig Client and Hadoop MapReduce (MR) tasks.

loggingConfig Property Map
properties Map<String>

A mapping of property names to values, used to configure Pig. Properties that conflict with values set by the Cloud Dataproc API may be overwritten.

queryFileUri String

The HCFS URI of the script that contains SQL queries. Conflicts with query_list.

queryLists List<String>

The list of SQL queries or statements to execute as part of the job. Conflicts with query_file_uri.

scriptVariables Map<String>

Mapping of query variable names to values (equivalent to the Pig command: name=[value]).
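
A minimal TypeScript sketch of a Pig job via pigConfig (imports as in the Example Usage section), running a word count over a file that ships with Pig on Dataproc images:

const pig = new gcp.dataproc.Job("pig", {
    region: "us-central1",
    placement: {
        clusterName: "mycluster", // placeholder: an existing Dataproc cluster
    },
    pigConfig: {
        queryLists: [
            "LNS = LOAD 'file:///usr/lib/pig/LICENSE.txt' as (line)",
            "WORDS = FOREACH LNS GENERATE FLATTEN(TOKENIZE(line)) AS word",
            "GROUPS = GROUP WORDS BY word",
            "WORD_COUNTS = FOREACH GROUPS GENERATE group, COUNT(WORDS)",
            "DUMP WORD_COUNTS",
        ],
    },
});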

JobPigConfigLoggingConfig

DriverLogLevels Dictionary<string, string>
DriverLogLevels map[string]string
driverLogLevels Map<String,String>
driverLogLevels {[key: string]: string}
driver_log_levels Mapping[str, str]
driverLogLevels Map<String>

JobPlacement

JobPrestoConfig

ClientTags List<string>

Presto client tags to attach to this query.

ContinueOnFailure bool

Whether to continue executing queries if a query fails. Setting to true can be useful when executing independent parallel queries. Defaults to false.

LoggingConfig JobPrestoConfigLoggingConfig
OutputFormat string

The format in which query output will be displayed. See the Presto documentation for supported output formats.

Properties Dictionary<string, string>

A mapping of property names to values, used to set Presto session properties. Equivalent to using the --session flag in the Presto CLI.

QueryFileUri string

The HCFS URI of the script that contains SQL queries. Conflicts with query_list

QueryLists List<string>

The list of SQL queries or statements to execute as part of the job. Conflicts with query_file_uri

ClientTags []string

Presto client tags to attach to this query.

ContinueOnFailure bool

Whether to continue executing queries if a query fails. Setting to true can be useful when executing independent parallel queries. Defaults to false.

LoggingConfig JobPrestoConfigLoggingConfig
OutputFormat string

The format in which query output will be displayed. See the Presto documentation for supported output formats.

Properties map[string]string

A mapping of property names to values, used to set Presto session properties. Equivalent to using the --session flag in the Presto CLI.

QueryFileUri string

The HCFS URI of the script that contains SQL queries. Conflicts with query_list

QueryLists []string

The list of SQL queries or statements to execute as part of the job. Conflicts with query_file_uri

clientTags List<String>

Presto client tags to attach to this query.

continueOnFailure Boolean

Whether to continue executing queries if a query fails. Setting to true can be useful when executing independent parallel queries. Defaults to false.

loggingConfig JobPrestoConfigLoggingConfig
outputFormat String

The format in which query output will be displayed. See the Presto documentation for supported output formats.

properties Map<String,String>

A mapping of property names to values, used to set Presto session properties. Equivalent to using the --session flag in the Presto CLI.

queryFileUri String

The HCFS URI of the script that contains SQL queries. Conflicts with query_list

queryLists List<String>

The list of SQL queries or statements to execute as part of the job. Conflicts with query_file_uri

clientTags string[]

Presto client tags to attach to this query.

continueOnFailure boolean

Whether to continue executing queries if a query fails. Setting to true can be useful when executing independent parallel queries. Defaults to false.

loggingConfig JobPrestoConfigLoggingConfig
outputFormat string

The format in which query output will be displayed. See the Presto documentation for supported output formats.

properties {[key: string]: string}

A mapping of property names to values, used to set Presto session properties. Equivalent to using the --session flag in the Presto CLI.

queryFileUri string

The HCFS URI of the script that contains SQL queries. Conflicts with query_list

queryLists string[]

The list of SQL queries or statements to execute as part of the job. Conflicts with query_file_uri

client_tags Sequence[str]

Presto client tags to attach to this query.

continue_on_failure bool

Whether to continue executing queries if a query fails. Setting to true can be useful when executing independent parallel queries. Defaults to false.

logging_config JobPrestoConfigLoggingConfig
output_format str

The format in which query output will be displayed. See the Presto documentation for supported output formats.

properties Mapping[str, str]

A mapping of property names to values. Used to set Presto session properties. Equivalent to using the --session flag in the Presto CLI.

query_file_uri str

The HCFS URI of the script that contains SQL queries. Conflicts with query_list.

query_lists Sequence[str]

The list of SQL queries or statements to execute as part of the job. Conflicts with query_file_uri.

clientTags List<String>

Presto client tags to attach to this query.

continueOnFailure Boolean

Whether to continue executing queries if a query fails. Setting to true can be useful when executing independent parallel queries. Defaults to false.

loggingConfig Property Map
outputFormat String

The format in which query output will be displayed. See the Presto documentation for supported output formats.

properties Map<String>

A mapping of property names to values. Used to set Presto session properties. Equivalent to using the --session flag in the Presto CLI.

queryFileUri String

The HCFS URI of the script that contains SQL queries. Conflicts with query_list.

queryLists List<String>

The list of SQL queries or statements to execute as part of the job. Conflicts with query_file_uri.
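
For illustration, a minimal TypeScript sketch of a Presto job using the fields above. The cluster name, region, and query are placeholder assumptions, and the cluster is assumed to have been created with the Presto optional component enabled.

import * as gcp from "@pulumi/gcp";

// A hypothetical Presto job; "mycluster", the region, and the query are placeholders.
const presto = new gcp.dataproc.Job("presto", {
    region: "us-central1",
    placement: {
        clusterName: "mycluster", // assumes the cluster enables the Presto optional component
    },
    prestoConfig: {
        clientTags: ["pulumi-example"], // tags attached to this query
        continueOnFailure: false,       // stop at the first failed query
        outputFormat: "CSV",
        queryLists: ["SELECT 1"],       // inline queries; conflicts with queryFileUri
        loggingConfig: {
            driverLogLevels: { root: "INFO" },
        },
    },
});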

JobPrestoConfigLoggingConfig

DriverLogLevels Dictionary<string, string>
DriverLogLevels map[string]string
driverLogLevels Map<String,String>
driverLogLevels {[key: string]: string}
driver_log_levels Mapping[str, str]
driverLogLevels Map<String>

JobPysparkConfig

MainPythonFileUri string

The HCFS URI of the main Python file to use as the driver. Must be a .py file.

ArchiveUris List<string>

HCFS URIs of archives to be extracted in the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.

Args List<string>

The arguments to pass to the driver. Do not include arguments, such as -libjars or -Dfoo=bar, that can be set as job properties, since a collision may occur that causes an incorrect job submission.

FileUris List<string>

HCFS URIs of files to be copied to the working directory of Hadoop drivers and distributed tasks. Useful for naively parallel tasks.

JarFileUris List<string>

HCFS URIs of jar files to be added to the Spark CLASSPATH.

LoggingConfig JobPysparkConfigLoggingConfig
Properties Dictionary<string, string>

A mapping of property names to values, used to configure PySpark. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.

PythonFileUris List<string>

HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip.

MainPythonFileUri string

The HCFS URI of the main Python file to use as the driver. Must be a .py file.

ArchiveUris []string

HCFS URIs of archives to be extracted in the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.

Args []string

The arguments to pass to the driver. Do not include arguments, such as -libjars or -Dfoo=bar, that can be set as job properties, since a collision may occur that causes an incorrect job submission.

FileUris []string

HCFS URIs of files to be copied to the working directory of Hadoop drivers and distributed tasks. Useful for naively parallel tasks.

JarFileUris []string

HCFS URIs of jar files to be added to the Spark CLASSPATH.

LoggingConfig JobPysparkConfigLoggingConfig
Properties map[string]string

A mapping of property names to values, used to configure PySpark. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.

PythonFileUris []string

HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip.

mainPythonFileUri String

The HCFS URI of the main Python file to use as the driver. Must be a .py file.

archiveUris List<String>

HCFS URIs of archives to be extracted in the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.

args List<String>

The arguments to pass to the driver. Do not include arguments, such as -libjars or -Dfoo=bar, that can be set as job properties, since a collision may occur that causes an incorrect job submission.

fileUris List<String>

HCFS URIs of files to be copied to the working directory of Hadoop drivers and distributed tasks. Useful for naively parallel tasks.

jarFileUris List<String>

HCFS URIs of jar files to be added to the Spark CLASSPATH.

loggingConfig JobPysparkConfigLoggingConfig
properties Map<String,String>

A mapping of property names to values, used to configure PySpark. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.

pythonFileUris List<String>

HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip.

mainPythonFileUri string

The HCFS URI of the main Python file to use as the driver. Must be a .py file.

archiveUris string[]

HCFS URIs of archives to be extracted in the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.

args string[]

The arguments to pass to the driver. Do not include arguments, such as -libjars or -Dfoo=bar, that can be set as job properties, since a collision may occur that causes an incorrect job submission.

fileUris string[]

HCFS URIs of files to be copied to the working directory of Hadoop drivers and distributed tasks. Useful for naively parallel tasks.

jarFileUris string[]

HCFS URIs of jar files to be added to the Spark CLASSPATH.

loggingConfig JobPysparkConfigLoggingConfig
properties {[key: string]: string}

A mapping of property names to values, used to configure PySpark. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.

pythonFileUris string[]

HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip.

main_python_file_uri str

The HCFS URI of the main Python file to use as the driver. Must be a .py file.

archive_uris Sequence[str]

HCFS URIs of archives to be extracted in the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.

args Sequence[str]

The arguments to pass to the driver. Do not include arguments, such as -libjars or -Dfoo=bar, that can be set as job properties, since a collision may occur that causes an incorrect job submission.

file_uris Sequence[str]

HCFS URIs of files to be copied to the working directory of Hadoop drivers and distributed tasks. Useful for naively parallel tasks.

jar_file_uris Sequence[str]

HCFS URIs of jar files to be added to the Spark CLASSPATH.

logging_config JobPysparkConfigLoggingConfig
properties Mapping[str, str]

A mapping of property names to values, used to configure PySpark. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.

python_file_uris Sequence[str]

HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip.

mainPythonFileUri String

The HCFS URI of the main Python file to use as the driver. Must be a .py file.

archiveUris List<String>

HCFS URIs of archives to be extracted in the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.

args List<String>

The arguments to pass to the driver. Do not include arguments, such as -libjars or -Dfoo=bar, that can be set as job properties, since a collision may occur that causes an incorrect job submission.

fileUris List<String>

HCFS URIs of files to be copied to the working directory of Hadoop drivers and distributed tasks. Useful for naively parallel tasks.

jarFileUris List<String>

HCFS URIs of jar files to be added to the Spark CLASSPATH.

loggingConfig Property Map
properties Map<String>

A mapping of property names to values, used to configure PySpark. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.

pythonFileUris List<String>

HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip.
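
A hedged TypeScript sketch exercising the optional PySpark fields above; the GCS paths, region, and cluster name are placeholders, not real objects.

import * as gcp from "@pulumi/gcp";

// A hypothetical PySpark job; the bucket paths and "mycluster" are placeholders.
const pysparkExtended = new gcp.dataproc.Job("pyspark-extended", {
    region: "us-central1",
    placement: {
        clusterName: "mycluster",
    },
    pysparkConfig: {
        mainPythonFileUri: "gs://my-bucket/jobs/main.py",   // driver; must be a .py file
        pythonFileUris: ["gs://my-bucket/jobs/helpers.py"], // extra Python files for the framework
        fileUris: ["gs://my-bucket/data/lookup.csv"],       // copied into the working directory
        args: ["--partitions", "1000"],                     // passed to the driver
        properties: { "spark.logConf": "true" },
    },
});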

JobPysparkConfigLoggingConfig

DriverLogLevels Dictionary<string, string>
DriverLogLevels map[string]string
driverLogLevels Map<String,String>
driverLogLevels {[key: string]: string}
driver_log_levels Mapping[str, str]
driverLogLevels Map<String>

JobReference

JobId string
JobId string
jobId String
jobId string
job_id str
jobId String
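
By default the server generates the job ID; below is a short sketch of supplying one through reference. The ID, region, and cluster name are placeholders.

import * as gcp from "@pulumi/gcp";

// A hypothetical job with a caller-chosen job ID; the ID and cluster name are placeholders.
const sparkPi = new gcp.dataproc.Job("spark-pi", {
    region: "us-central1",
    placement: {
        clusterName: "mycluster",
    },
    reference: {
        jobId: "spark-pi-0001", // must be unique within the project
    },
    sparkConfig: {
        mainClass: "org.apache.spark.examples.SparkPi",
        jarFileUris: ["file:///usr/lib/spark/examples/jars/spark-examples.jar"],
    },
});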

JobScheduling

JobSparkConfig

ArchiveUris List<string>

HCFS URIs of archives to be extracted in the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.

Args List<string>

The arguments to pass to the driver. Do not include arguments, such as -libjars or -Dfoo=bar, that can be set as job properties, since a collision may occur that causes an incorrect job submission.

FileUris List<string>

HCFS URIs of files to be copied to the working directory of Hadoop drivers and distributed tasks. Useful for naively parallel tasks.

JarFileUris List<string>

HCFS URIs of jar files to be added to the Spark CLASSPATH.

LoggingConfig JobSparkConfigLoggingConfig
MainClass string

The name of the driver's main class. The jar file containing the class must be in the default CLASSPATH or specified in jar_file_uris. Conflicts with main_jar_file_uri.

MainJarFileUri string

The HCFS URI of the jar file containing the main class. Examples: 'gs://foo-bucket/analytics-binaries/extract-useful-metrics-mr.jar' 'hdfs:/tmp/test-samples/custom-wordcount.jar' 'file:///home/usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar'. Conflicts with main_class.

Properties Dictionary<string, string>

A mapping of property names to values, used to configure Spark. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.

ArchiveUris []string

HCFS URIs of archives to be extracted in the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.

Args []string

The arguments to pass to the driver. Do not include arguments, such as -libjars or -Dfoo=bar, that can be set as job properties, since a collision may occur that causes an incorrect job submission.

FileUris []string

HCFS URIs of files to be copied to the working directory of Hadoop drivers and distributed tasks. Useful for naively parallel tasks.

JarFileUris []string

HCFS URIs of jar files to be added to the Spark CLASSPATH.

LoggingConfig JobSparkConfigLoggingConfig
MainClass string

The name of the driver's main class. The jar file containing the class must be in the default CLASSPATH or specified in jar_file_uris. Conflicts with main_jar_file_uri.

MainJarFileUri string

The HCFS URI of the jar file containing the main class. Examples: 'gs://foo-bucket/analytics-binaries/extract-useful-metrics-mr.jar' 'hdfs:/tmp/test-samples/custom-wordcount.jar' 'file:///home/usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar'. Conflicts with main_class.

Properties map[string]string

A mapping of property names to values, used to configure Spark. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.

archiveUris List<String>

HCFS URIs of archives to be extracted in the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.

args List<String>

The arguments to pass to the driver. Do not include arguments, such as -libjars or -Dfoo=bar, that can be set as job properties, since a collision may occur that causes an incorrect job submission.

fileUris List<String>

HCFS URIs of files to be copied to the working directory of Hadoop drivers and distributed tasks. Useful for naively parallel tasks.

jarFileUris List<String>

HCFS URIs of jar files to be added to the Spark CLASSPATH.

loggingConfig JobSparkConfigLoggingConfig
mainClass String

The name of the driver's main class. The jar file containing the class must be in the default CLASSPATH or specified in jar_file_uris. Conflicts with main_jar_file_uri.

mainJarFileUri String

The HCFS URI of the jar file containing the main class. Examples: 'gs://foo-bucket/analytics-binaries/extract-useful-metrics-mr.jar' 'hdfs:/tmp/test-samples/custom-wordcount.jar' 'file:///home/usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar'. Conflicts with main_class.

properties Map<String,String>

A mapping of property names to values, used to configure Spark. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.

archiveUris string[]

HCFS URIs of archives to be extracted in the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.

args string[]

The arguments to pass to the driver. Do not include arguments, such as -libjars or -Dfoo=bar, that can be set as job properties, since a collision may occur that causes an incorrect job submission.

fileUris string[]

HCFS URIs of files to be copied to the working directory of Hadoop drivers and distributed tasks. Useful for naively parallel tasks.

jarFileUris string[]

HCFS URIs of jar files to be added to the Spark CLASSPATH.

loggingConfig JobSparkConfigLoggingConfig
mainClass string

The name of the driver's main class. The jar file containing the class must be in the default CLASSPATH or specified in jar_file_uris. Conflicts with main_jar_file_uri.

mainJarFileUri string

The HCFS URI of the jar file containing the main class. Examples: 'gs://foo-bucket/analytics-binaries/extract-useful-metrics-mr.jar' 'hdfs:/tmp/test-samples/custom-wordcount.jar' 'file:///home/usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar'. Conflicts with main_class.

properties {[key: string]: string}

A mapping of property names to values, used to configure Spark. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.

archive_uris Sequence[str]

HCFS URIs of archives to be extracted in the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.

args Sequence[str]

The arguments to pass to the driver. Do not include arguments, such as -libjars or -Dfoo=bar, that can be set as job properties, since a collision may occur that causes an incorrect job submission.

file_uris Sequence[str]

HCFS URIs of files to be copied to the working directory of Hadoop drivers and distributed tasks. Useful for naively parallel tasks.

jar_file_uris Sequence[str]

HCFS URIs of jar files to be added to the Spark CLASSPATH.

logging_config JobSparkConfigLoggingConfig
main_class str

The name of the driver's main class. The jar file containing the class must be in the default CLASSPATH or specified in jar_file_uris. Conflicts with main_jar_file_uri.

main_jar_file_uri str

The HCFS URI of the jar file containing the main class. Examples: 'gs://foo-bucket/analytics-binaries/extract-useful-metrics-mr.jar' 'hdfs:/tmp/test-samples/custom-wordcount.jar' 'file:///home/usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar'. Conflicts with main_class.

properties Mapping[str, str]

A mapping of property names to values, used to configure Spark. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.

archiveUris List<String>

HCFS URIs of archives to be extracted in the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.

args List<String>

The arguments to pass to the driver. Do not include arguments, such as -libjars or -Dfoo=bar, that can be set as job properties, since a collision may occur that causes an incorrect job submission.

fileUris List<String>

HCFS URIs of files to be copied to the working directory of Hadoop drivers and distributed tasks. Useful for naively parallel tasks.

jarFileUris List<String>

HCFS URIs of jar files to be added to the Spark CLASSPATH.

loggingConfig Property Map
mainClass String

The name of the driver's main class. The jar file containing the class must be in the default CLASSPATH or specified in jar_file_uris. Conflicts with main_jar_file_uri.

mainJarFileUri String

The HCFS URI of the jar file containing the main class. Examples: 'gs://foo-bucket/analytics-binaries/extract-useful-metrics-mr.jar' 'hdfs:/tmp/test-samples/custom-wordcount.jar' 'file:///home/usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar'. Conflicts with main_class.

properties Map<String>

A mapping of property names to values, used to configure Spark. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.
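
Since mainClass and mainJarFileUri conflict, here is a hedged TypeScript sketch of the jar-based variant; the jar path, region, and cluster name are placeholders.

import * as gcp from "@pulumi/gcp";

// A hypothetical Spark job launched by jar rather than by class name;
// the jar path and "mycluster" are placeholders.
const sparkJar = new gcp.dataproc.Job("spark-jar", {
    region: "us-central1",
    placement: {
        clusterName: "mycluster",
    },
    sparkConfig: {
        mainJarFileUri: "gs://my-bucket/jars/my-app.jar", // conflicts with mainClass
        args: ["1000"],
        properties: { "spark.logConf": "true" },
    },
});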

JobSparkConfigLoggingConfig

DriverLogLevels Dictionary<string, string>
DriverLogLevels map[string]string
driverLogLevels Map<String,String>
driverLogLevels {[key: string]: string}
driver_log_levels Mapping[str, str]
driverLogLevels Map<String>

JobSparksqlConfig

JarFileUris List<string>

HCFS URIs of jar files to be added to the Spark CLASSPATH.

LoggingConfig JobSparksqlConfigLoggingConfig
Properties Dictionary<string, string>

A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the Dataproc API may be overwritten.

QueryFileUri string

The HCFS URI of the script that contains SQL queries. Conflicts with query_list.

QueryLists List<string>

The list of SQL queries or statements to execute as part of the job. Conflicts with query_file_uri.

ScriptVariables Dictionary<string, string>

Mapping of query variable names to values (equivalent to the Spark SQL command: SET name="value";).

JarFileUris []string

HCFS URIs of jar files to be added to the Spark CLASSPATH.

LoggingConfig JobSparksqlConfigLoggingConfig
Properties map[string]string

A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the Dataproc API may be overwritten.

QueryFileUri string

The HCFS URI of the script that contains SQL queries. Conflicts with query_list.

QueryLists []string

The list of SQL queries or statements to execute as part of the job. Conflicts with query_file_uri.

ScriptVariables map[string]string

Mapping of query variable names to values (equivalent to the Spark SQL command: SET name="value";).

jarFileUris List<String>

HCFS URIs of jar files to be added to the Spark CLASSPATH.

loggingConfig JobSparksqlConfigLoggingConfig
properties Map<String,String>

A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the Dataproc API may be overwritten.

queryFileUri String

The HCFS URI of the script that contains SQL queries. Conflicts with query_list.

queryLists List<String>

The list of SQL queries or statements to execute as part of the job. Conflicts with query_file_uri.

scriptVariables Map<String,String>

Mapping of query variable names to values (equivalent to the Spark SQL command: SET name="value";).

jarFileUris string[]

HCFS URIs of jar files to be added to the Spark CLASSPATH.

loggingConfig JobSparksqlConfigLoggingConfig
properties {[key: string]: string}

A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the Dataproc API may be overwritten.

queryFileUri string

The HCFS URI of the script that contains SQL queries. Conflicts with query_list.

queryLists string[]

The list of SQL queries or statements to execute as part of the job. Conflicts with query_file_uri.

scriptVariables {[key: string]: string}

Mapping of query variable names to values (equivalent to the Spark SQL command: SET name="value";).

jar_file_uris Sequence[str]

HCFS URIs of jar files to be added to the Spark CLASSPATH.

logging_config JobSparksqlConfigLoggingConfig
properties Mapping[str, str]

A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the Dataproc API may be overwritten.

query_file_uri str

The HCFS URI of the script that contains SQL queries. Conflicts with query_list.

query_lists Sequence[str]

The list of SQL queries or statements to execute as part of the job. Conflicts with query_file_uri.

script_variables Mapping[str, str]

Mapping of query variable names to values (equivalent to the Spark SQL command: SET name="value";).

jarFileUris List<String>

HCFS URIs of jar files to be added to the Spark CLASSPATH.

loggingConfig Property Map
properties Map<String>

A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the Dataproc API may be overwritten.

queryFileUri String

The HCFS URI of the script that contains SQL queries. Conflicts with query_list.

queryLists List<String>

The list of SQL queries or statements to execute as part of the job. Conflicts with query_file_uri.

scriptVariables Map<String>

Mapping of query variable names to values (equivalent to the Spark SQL command: SET name="value";).
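
A hedged TypeScript sketch of a Spark SQL job showing scriptVariables substitution; the table name, region, and cluster name are placeholders.

import * as gcp from "@pulumi/gcp";

// A hypothetical Spark SQL job; "default.my_table" is a placeholder.
// Each scriptVariables entry behaves like prefixing the script with
// SET name="value"; so ${table} below resolves to default.my_table.
const sparksql = new gcp.dataproc.Job("sparksql", {
    region: "us-central1",
    placement: {
        clusterName: "mycluster",
    },
    sparksqlConfig: {
        queryLists: [                       // conflicts with queryFileUri
            "SHOW DATABASES;",
            "SELECT COUNT(*) FROM ${table};",
        ],
        scriptVariables: {
            table: "default.my_table",
        },
    },
});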

JobSparksqlConfigLoggingConfig

DriverLogLevels Dictionary<string, string>
DriverLogLevels map[string]string
driverLogLevels Map<String,String>
driverLogLevels {[key: string]: string}
driver_log_levels Mapping[str, str]
driverLogLevels Map<String>

JobStatus

Details string
State string
StateStartTime string
Substate string
Details string
State string
StateStartTime string
Substate string
details String
state String
stateStartTime String
substate String
details string
state string
stateStartTime string
substate string
details String
state String
stateStartTime String
substate String

Import

This resource does not support import.

Package Details

Repository
https://github.com/pulumi/pulumi-gcp
License
Apache-2.0
Notes

This Pulumi package is based on the google-beta Terraform Provider.