
gcp.dataproc.Job

Google Cloud Classic v7.19.0 published on Thursday, Apr 18, 2024 by Pulumi

    Manages a job resource within a Dataproc cluster running on Google Compute Engine (GCE). For more information, see the official Dataproc documentation.

    Note: This resource does not support 'update'; changing any attribute will cause the resource to be recreated.
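
    Because any attribute change replaces the job, it can be useful to pair the resource with standard Pulumi resource options. The snippet below is a minimal TypeScript sketch, not taken from this page: the cluster name and region are assumptions, and it uses the generic ignoreChanges option so that later edits to the labels input are ignored instead of forcing a replacement.

    import * as gcp from "@pulumi/gcp";

    // Sketch only: "my-cluster" and the region are placeholder assumptions.
    const job = new gcp.dataproc.Job("example", {
        region: "us-central1",
        forceDelete: true, // cancel the job if it is still active when the resource is deleted
        placement: {
            clusterName: "my-cluster",
        },
        sparkConfig: {
            mainClass: "org.apache.spark.examples.SparkPi",
            jarFileUris: ["file:///usr/lib/spark/examples/jars/spark-examples.jar"],
        },
        labels: {
            team: "data-eng",
        },
    }, { ignoreChanges: ["labels"] });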

    Example Usage

    import * as pulumi from "@pulumi/pulumi";
    import * as gcp from "@pulumi/gcp";
    
    const mycluster = new gcp.dataproc.Cluster("mycluster", {
        name: "dproc-cluster-unique-name",
        region: "us-central1",
    });
    // Submit an example spark job to a dataproc cluster
    const spark = new gcp.dataproc.Job("spark", {
        region: mycluster.region,
        forceDelete: true,
        placement: {
            clusterName: mycluster.name,
        },
        sparkConfig: {
            mainClass: "org.apache.spark.examples.SparkPi",
            jarFileUris: ["file:///usr/lib/spark/examples/jars/spark-examples.jar"],
            args: ["1000"],
            properties: {
                "spark.logConf": "true",
            },
            loggingConfig: {
                driverLogLevels: {
                    root: "INFO",
                },
            },
        },
    });
    // Submit an example pyspark job to a dataproc cluster
    const pyspark = new gcp.dataproc.Job("pyspark", {
        region: mycluster.region,
        forceDelete: true,
        placement: {
            clusterName: mycluster.name,
        },
        pysparkConfig: {
            mainPythonFileUri: "gs://dataproc-examples-2f10d78d114f6aaec76462e3c310f31f/src/pyspark/hello-world/hello-world.py",
            properties: {
                "spark.logConf": "true",
            },
        },
    });
    export const sparkStatus = spark.statuses.apply(statuses => statuses[0].state);
    export const pysparkStatus = pyspark.statuses.apply(statuses => statuses[0].state);
    
    import pulumi
    import pulumi_gcp as gcp
    
    mycluster = gcp.dataproc.Cluster("mycluster",
        name="dproc-cluster-unique-name",
        region="us-central1")
    # Submit an example spark job to a dataproc cluster
    spark = gcp.dataproc.Job("spark",
        region=mycluster.region,
        force_delete=True,
        placement=gcp.dataproc.JobPlacementArgs(
            cluster_name=mycluster.name,
        ),
        spark_config=gcp.dataproc.JobSparkConfigArgs(
            main_class="org.apache.spark.examples.SparkPi",
            jar_file_uris=["file:///usr/lib/spark/examples/jars/spark-examples.jar"],
            args=["1000"],
            properties={
                "spark.logConf": "true",
            },
            logging_config=gcp.dataproc.JobSparkConfigLoggingConfigArgs(
                driver_log_levels={
                    "root": "INFO",
                },
            ),
        ))
    # Submit an example pyspark job to a dataproc cluster
    pyspark = gcp.dataproc.Job("pyspark",
        region=mycluster.region,
        force_delete=True,
        placement=gcp.dataproc.JobPlacementArgs(
            cluster_name=mycluster.name,
        ),
        pyspark_config=gcp.dataproc.JobPysparkConfigArgs(
            main_python_file_uri="gs://dataproc-examples-2f10d78d114f6aaec76462e3c310f31f/src/pyspark/hello-world/hello-world.py",
            properties={
                "spark.logConf": "true",
            },
        ))
    pulumi.export("sparkStatus", spark.statuses[0].state)
    pulumi.export("pysparkStatus", pyspark.statuses[0].state)
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/dataproc"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		mycluster, err := dataproc.NewCluster(ctx, "mycluster", &dataproc.ClusterArgs{
    			Name:   pulumi.String("dproc-cluster-unique-name"),
    			Region: pulumi.String("us-central1"),
    		})
    		if err != nil {
    			return err
    		}
    		// Submit an example spark job to a dataproc cluster
    		spark, err := dataproc.NewJob(ctx, "spark", &dataproc.JobArgs{
    			Region:      mycluster.Region,
    			ForceDelete: pulumi.Bool(true),
    			Placement: &dataproc.JobPlacementArgs{
    				ClusterName: mycluster.Name,
    			},
    			SparkConfig: &dataproc.JobSparkConfigArgs{
    				MainClass: pulumi.String("org.apache.spark.examples.SparkPi"),
    				JarFileUris: pulumi.StringArray{
    					pulumi.String("file:///usr/lib/spark/examples/jars/spark-examples.jar"),
    				},
    				Args: pulumi.StringArray{
    					pulumi.String("1000"),
    				},
    				Properties: pulumi.StringMap{
    					"spark.logConf": pulumi.String("true"),
    				},
    				LoggingConfig: &dataproc.JobSparkConfigLoggingConfigArgs{
    					DriverLogLevels: pulumi.StringMap{
    						"root": pulumi.String("INFO"),
    					},
    				},
    			},
    		})
    		if err != nil {
    			return err
    		}
    		// Submit an example pyspark job to a dataproc cluster
    		pyspark, err := dataproc.NewJob(ctx, "pyspark", &dataproc.JobArgs{
    			Region:      mycluster.Region,
    			ForceDelete: pulumi.Bool(true),
    			Placement: &dataproc.JobPlacementArgs{
    				ClusterName: mycluster.Name,
    			},
    			PysparkConfig: &dataproc.JobPysparkConfigArgs{
    				MainPythonFileUri: pulumi.String("gs://dataproc-examples-2f10d78d114f6aaec76462e3c310f31f/src/pyspark/hello-world/hello-world.py"),
    				Properties: pulumi.StringMap{
    					"spark.logConf": pulumi.String("true"),
    				},
    			},
    		})
    		if err != nil {
    			return err
    		}
    		ctx.Export("sparkStatus", spark.Statuses.ApplyT(func(statuses []dataproc.JobStatus) (*string, error) {
    			return &statuses[0].State, nil
    		}).(pulumi.StringPtrOutput))
    		ctx.Export("pysparkStatus", pyspark.Statuses.ApplyT(func(statuses []dataproc.JobStatus) (*string, error) {
    			return &statuses[0].State, nil
    		}).(pulumi.StringPtrOutput))
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Gcp = Pulumi.Gcp;
    
    return await Deployment.RunAsync(() => 
    {
        var mycluster = new Gcp.Dataproc.Cluster("mycluster", new()
        {
            Name = "dproc-cluster-unique-name",
            Region = "us-central1",
        });
    
        // Submit an example spark job to a dataproc cluster
        var spark = new Gcp.Dataproc.Job("spark", new()
        {
            Region = mycluster.Region,
            ForceDelete = true,
            Placement = new Gcp.Dataproc.Inputs.JobPlacementArgs
            {
                ClusterName = mycluster.Name,
            },
            SparkConfig = new Gcp.Dataproc.Inputs.JobSparkConfigArgs
            {
                MainClass = "org.apache.spark.examples.SparkPi",
                JarFileUris = new[]
                {
                    "file:///usr/lib/spark/examples/jars/spark-examples.jar",
                },
                Args = new[]
                {
                    "1000",
                },
                Properties = 
                {
                    { "spark.logConf", "true" },
                },
                LoggingConfig = new Gcp.Dataproc.Inputs.JobSparkConfigLoggingConfigArgs
                {
                    DriverLogLevels = 
                    {
                        { "root", "INFO" },
                    },
                },
            },
        });
    
        // Submit an example pyspark job to a dataproc cluster
        var pyspark = new Gcp.Dataproc.Job("pyspark", new()
        {
            Region = mycluster.Region,
            ForceDelete = true,
            Placement = new Gcp.Dataproc.Inputs.JobPlacementArgs
            {
                ClusterName = mycluster.Name,
            },
            PysparkConfig = new Gcp.Dataproc.Inputs.JobPysparkConfigArgs
            {
                MainPythonFileUri = "gs://dataproc-examples-2f10d78d114f6aaec76462e3c310f31f/src/pyspark/hello-world/hello-world.py",
                Properties = 
                {
                    { "spark.logConf", "true" },
                },
            },
        });
    
        return new Dictionary<string, object?>
        {
            ["sparkStatus"] = spark.Statuses.Apply(statuses => statuses[0].State),
            ["pysparkStatus"] = pyspark.Statuses.Apply(statuses => statuses[0].State),
        };
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.gcp.dataproc.Cluster;
    import com.pulumi.gcp.dataproc.ClusterArgs;
    import com.pulumi.gcp.dataproc.Job;
    import com.pulumi.gcp.dataproc.JobArgs;
    import com.pulumi.gcp.dataproc.inputs.JobPlacementArgs;
    import com.pulumi.gcp.dataproc.inputs.JobSparkConfigArgs;
    import com.pulumi.gcp.dataproc.inputs.JobSparkConfigLoggingConfigArgs;
    import com.pulumi.gcp.dataproc.inputs.JobPysparkConfigArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var mycluster = new Cluster("mycluster", ClusterArgs.builder()        
                .name("dproc-cluster-unique-name")
                .region("us-central1")
                .build());
    
            // Submit an example spark job to a dataproc cluster
            var spark = new Job("spark", JobArgs.builder()        
                .region(mycluster.region())
                .forceDelete(true)
                .placement(JobPlacementArgs.builder()
                    .clusterName(mycluster.name())
                    .build())
                .sparkConfig(JobSparkConfigArgs.builder()
                    .mainClass("org.apache.spark.examples.SparkPi")
                    .jarFileUris("file:///usr/lib/spark/examples/jars/spark-examples.jar")
                    .args("1000")
                    .properties(Map.of("spark.logConf", "true"))
                    .loggingConfig(JobSparkConfigLoggingConfigArgs.builder()
                        .driverLogLevels(Map.of("root", "INFO"))
                        .build())
                    .build())
                .build());
    
            // Submit an example pyspark job to a dataproc cluster
            var pyspark = new Job("pyspark", JobArgs.builder()        
                .region(mycluster.region())
                .forceDelete(true)
                .placement(JobPlacementArgs.builder()
                    .clusterName(mycluster.name())
                    .build())
                .pysparkConfig(JobPysparkConfigArgs.builder()
                    .mainPythonFileUri("gs://dataproc-examples-2f10d78d114f6aaec76462e3c310f31f/src/pyspark/hello-world/hello-world.py")
                    .properties(Map.of("spark.logConf", "true"))
                    .build())
                .build());
    
            ctx.export("sparkStatus", spark.statuses().applyValue(statuses -> statuses[0].state()));
            ctx.export("pysparkStatus", pyspark.statuses().applyValue(statuses -> statuses[0].state()));
        }
    }
    
    resources:
      mycluster:
        type: gcp:dataproc:Cluster
        properties:
          name: dproc-cluster-unique-name
          region: us-central1
      # Submit an example spark job to a dataproc cluster
      spark:
        type: gcp:dataproc:Job
        properties:
          region: ${mycluster.region}
          forceDelete: true
          placement:
            clusterName: ${mycluster.name}
          sparkConfig:
            mainClass: org.apache.spark.examples.SparkPi
            jarFileUris:
              - file:///usr/lib/spark/examples/jars/spark-examples.jar
            args:
              - '1000'
            properties:
              spark.logConf: 'true'
            loggingConfig:
              driverLogLevels:
                root: INFO
      # Submit an example pyspark job to a dataproc cluster
      pyspark:
        type: gcp:dataproc:Job
        properties:
          region: ${mycluster.region}
          forceDelete: true
          placement:
            clusterName: ${mycluster.name}
          pysparkConfig:
            mainPythonFileUri: gs://dataproc-examples-2f10d78d114f6aaec76462e3c310f31f/src/pyspark/hello-world/hello-world.py
            properties:
              spark.logConf: 'true'
    outputs:
      # Check out current state of the jobs
      sparkStatus: ${spark.statuses[0].state}
      pysparkStatus: ${pyspark.statuses[0].state}
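
    The examples above cover only Spark and PySpark jobs. The other config blocks (hadoopConfig, hiveConfig, pigConfig, prestoConfig, sparksqlConfig) follow the same shape. The snippet below is a minimal TypeScript sketch of a Hadoop word-count job submitted to the same cluster; the jar path, input file, and output bucket are illustrative assumptions rather than values taken from this page.

    import * as gcp from "@pulumi/gcp";

    // Assumes `mycluster` is the gcp.dataproc.Cluster declared in the example above and
    // that the MapReduce examples jar ships with the Dataproc image at this path.
    const hadoop = new gcp.dataproc.Job("hadoop", {
        region: mycluster.region,
        forceDelete: true,
        placement: {
            clusterName: mycluster.name,
        },
        hadoopConfig: {
            mainJarFileUri: "file:///usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar",
            args: [
                "wordcount",
                "file:///usr/lib/spark/NOTICE",
                "gs://my-example-bucket/hadoop-job-output", // assumed output bucket
            ],
        },
    });
    export const hadoopStatus = hadoop.statuses.apply(statuses => statuses[0].state);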
    

    Create Job Resource

    Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.

    Constructor syntax

    new Job(name: string, args: JobArgs, opts?: CustomResourceOptions);
    @overload
    def Job(resource_name: str,
            args: JobArgs,
            opts: Optional[ResourceOptions] = None)
    
    @overload
    def Job(resource_name: str,
            opts: Optional[ResourceOptions] = None,
            placement: Optional[JobPlacementArgs] = None,
            presto_config: Optional[JobPrestoConfigArgs] = None,
            hive_config: Optional[JobHiveConfigArgs] = None,
            labels: Optional[Mapping[str, str]] = None,
            pig_config: Optional[JobPigConfigArgs] = None,
            hadoop_config: Optional[JobHadoopConfigArgs] = None,
            force_delete: Optional[bool] = None,
            project: Optional[str] = None,
            pyspark_config: Optional[JobPysparkConfigArgs] = None,
            reference: Optional[JobReferenceArgs] = None,
            region: Optional[str] = None,
            scheduling: Optional[JobSchedulingArgs] = None,
            spark_config: Optional[JobSparkConfigArgs] = None,
            sparksql_config: Optional[JobSparksqlConfigArgs] = None)
    func NewJob(ctx *Context, name string, args JobArgs, opts ...ResourceOption) (*Job, error)
    public Job(string name, JobArgs args, CustomResourceOptions? opts = null)
    public Job(String name, JobArgs args)
    public Job(String name, JobArgs args, CustomResourceOptions options)
    
    type: gcp:dataproc:Job
    properties: # The arguments to resource properties.
    options: # Bag of options to control resource's behavior.
    
    

    Parameters

    TypeScript / JavaScript
    name string
    The unique name of the resource.
    args JobArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.

    Python
    resource_name str
    The unique name of the resource.
    args JobArgs
    The arguments to resource properties.
    opts ResourceOptions
    Bag of options to control resource's behavior.

    Go
    ctx Context
    Context object for the current deployment.
    name string
    The unique name of the resource.
    args JobArgs
    The arguments to resource properties.
    opts ResourceOption
    Bag of options to control resource's behavior.

    C#
    name string
    The unique name of the resource.
    args JobArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.

    Java
    name String
    The unique name of the resource.
    args JobArgs
    The arguments to resource properties.
    options CustomResourceOptions
    Bag of options to control resource's behavior.

    Example

    The following reference example uses placeholder values for all input properties.

    var examplejobResourceResourceFromDataprocjob = new Gcp.Dataproc.Job("examplejobResourceResourceFromDataprocjob", new()
    {
        Placement = new Gcp.Dataproc.Inputs.JobPlacementArgs
        {
            ClusterName = "string",
            ClusterUuid = "string",
        },
        PrestoConfig = new Gcp.Dataproc.Inputs.JobPrestoConfigArgs
        {
            ClientTags = new[]
            {
                "string",
            },
            ContinueOnFailure = false,
            LoggingConfig = new Gcp.Dataproc.Inputs.JobPrestoConfigLoggingConfigArgs
            {
                DriverLogLevels = 
                {
                    { "string", "string" },
                },
            },
            OutputFormat = "string",
            Properties = 
            {
                { "string", "string" },
            },
            QueryFileUri = "string",
            QueryLists = new[]
            {
                "string",
            },
        },
        HiveConfig = new Gcp.Dataproc.Inputs.JobHiveConfigArgs
        {
            ContinueOnFailure = false,
            JarFileUris = new[]
            {
                "string",
            },
            Properties = 
            {
                { "string", "string" },
            },
            QueryFileUri = "string",
            QueryLists = new[]
            {
                "string",
            },
            ScriptVariables = 
            {
                { "string", "string" },
            },
        },
        Labels = 
        {
            { "string", "string" },
        },
        PigConfig = new Gcp.Dataproc.Inputs.JobPigConfigArgs
        {
            ContinueOnFailure = false,
            JarFileUris = new[]
            {
                "string",
            },
            LoggingConfig = new Gcp.Dataproc.Inputs.JobPigConfigLoggingConfigArgs
            {
                DriverLogLevels = 
                {
                    { "string", "string" },
                },
            },
            Properties = 
            {
                { "string", "string" },
            },
            QueryFileUri = "string",
            QueryLists = new[]
            {
                "string",
            },
            ScriptVariables = 
            {
                { "string", "string" },
            },
        },
        HadoopConfig = new Gcp.Dataproc.Inputs.JobHadoopConfigArgs
        {
            ArchiveUris = new[]
            {
                "string",
            },
            Args = new[]
            {
                "string",
            },
            FileUris = new[]
            {
                "string",
            },
            JarFileUris = new[]
            {
                "string",
            },
            LoggingConfig = new Gcp.Dataproc.Inputs.JobHadoopConfigLoggingConfigArgs
            {
                DriverLogLevels = 
                {
                    { "string", "string" },
                },
            },
            MainClass = "string",
            MainJarFileUri = "string",
            Properties = 
            {
                { "string", "string" },
            },
        },
        ForceDelete = false,
        Project = "string",
        PysparkConfig = new Gcp.Dataproc.Inputs.JobPysparkConfigArgs
        {
            MainPythonFileUri = "string",
            ArchiveUris = new[]
            {
                "string",
            },
            Args = new[]
            {
                "string",
            },
            FileUris = new[]
            {
                "string",
            },
            JarFileUris = new[]
            {
                "string",
            },
            LoggingConfig = new Gcp.Dataproc.Inputs.JobPysparkConfigLoggingConfigArgs
            {
                DriverLogLevels = 
                {
                    { "string", "string" },
                },
            },
            Properties = 
            {
                { "string", "string" },
            },
            PythonFileUris = new[]
            {
                "string",
            },
        },
        Reference = new Gcp.Dataproc.Inputs.JobReferenceArgs
        {
            JobId = "string",
        },
        Region = "string",
        Scheduling = new Gcp.Dataproc.Inputs.JobSchedulingArgs
        {
            MaxFailuresPerHour = 0,
            MaxFailuresTotal = 0,
        },
        SparkConfig = new Gcp.Dataproc.Inputs.JobSparkConfigArgs
        {
            ArchiveUris = new[]
            {
                "string",
            },
            Args = new[]
            {
                "string",
            },
            FileUris = new[]
            {
                "string",
            },
            JarFileUris = new[]
            {
                "string",
            },
            LoggingConfig = new Gcp.Dataproc.Inputs.JobSparkConfigLoggingConfigArgs
            {
                DriverLogLevels = 
                {
                    { "string", "string" },
                },
            },
            MainClass = "string",
            MainJarFileUri = "string",
            Properties = 
            {
                { "string", "string" },
            },
        },
        SparksqlConfig = new Gcp.Dataproc.Inputs.JobSparksqlConfigArgs
        {
            JarFileUris = new[]
            {
                "string",
            },
            LoggingConfig = new Gcp.Dataproc.Inputs.JobSparksqlConfigLoggingConfigArgs
            {
                DriverLogLevels = 
                {
                    { "string", "string" },
                },
            },
            Properties = 
            {
                { "string", "string" },
            },
            QueryFileUri = "string",
            QueryLists = new[]
            {
                "string",
            },
            ScriptVariables = 
            {
                { "string", "string" },
            },
        },
    });
    
    example, err := dataproc.NewJob(ctx, "examplejobResourceResourceFromDataprocjob", &dataproc.JobArgs{
    	Placement: &dataproc.JobPlacementArgs{
    		ClusterName: pulumi.String("string"),
    		ClusterUuid: pulumi.String("string"),
    	},
    	PrestoConfig: &dataproc.JobPrestoConfigArgs{
    		ClientTags: pulumi.StringArray{
    			pulumi.String("string"),
    		},
    		ContinueOnFailure: pulumi.Bool(false),
    		LoggingConfig: &dataproc.JobPrestoConfigLoggingConfigArgs{
    			DriverLogLevels: pulumi.StringMap{
    				"string": pulumi.String("string"),
    			},
    		},
    		OutputFormat: pulumi.String("string"),
    		Properties: pulumi.StringMap{
    			"string": pulumi.String("string"),
    		},
    		QueryFileUri: pulumi.String("string"),
    		QueryLists: pulumi.StringArray{
    			pulumi.String("string"),
    		},
    	},
    	HiveConfig: &dataproc.JobHiveConfigArgs{
    		ContinueOnFailure: pulumi.Bool(false),
    		JarFileUris: pulumi.StringArray{
    			pulumi.String("string"),
    		},
    		Properties: pulumi.StringMap{
    			"string": pulumi.String("string"),
    		},
    		QueryFileUri: pulumi.String("string"),
    		QueryLists: pulumi.StringArray{
    			pulumi.String("string"),
    		},
    		ScriptVariables: pulumi.StringMap{
    			"string": pulumi.String("string"),
    		},
    	},
    	Labels: pulumi.StringMap{
    		"string": pulumi.String("string"),
    	},
    	PigConfig: &dataproc.JobPigConfigArgs{
    		ContinueOnFailure: pulumi.Bool(false),
    		JarFileUris: pulumi.StringArray{
    			pulumi.String("string"),
    		},
    		LoggingConfig: &dataproc.JobPigConfigLoggingConfigArgs{
    			DriverLogLevels: pulumi.StringMap{
    				"string": pulumi.String("string"),
    			},
    		},
    		Properties: pulumi.StringMap{
    			"string": pulumi.String("string"),
    		},
    		QueryFileUri: pulumi.String("string"),
    		QueryLists: pulumi.StringArray{
    			pulumi.String("string"),
    		},
    		ScriptVariables: pulumi.StringMap{
    			"string": pulumi.String("string"),
    		},
    	},
    	HadoopConfig: &dataproc.JobHadoopConfigArgs{
    		ArchiveUris: pulumi.StringArray{
    			pulumi.String("string"),
    		},
    		Args: pulumi.StringArray{
    			pulumi.String("string"),
    		},
    		FileUris: pulumi.StringArray{
    			pulumi.String("string"),
    		},
    		JarFileUris: pulumi.StringArray{
    			pulumi.String("string"),
    		},
    		LoggingConfig: &dataproc.JobHadoopConfigLoggingConfigArgs{
    			DriverLogLevels: pulumi.StringMap{
    				"string": pulumi.String("string"),
    			},
    		},
    		MainClass:      pulumi.String("string"),
    		MainJarFileUri: pulumi.String("string"),
    		Properties: pulumi.StringMap{
    			"string": pulumi.String("string"),
    		},
    	},
    	ForceDelete: pulumi.Bool(false),
    	Project:     pulumi.String("string"),
    	PysparkConfig: &dataproc.JobPysparkConfigArgs{
    		MainPythonFileUri: pulumi.String("string"),
    		ArchiveUris: pulumi.StringArray{
    			pulumi.String("string"),
    		},
    		Args: pulumi.StringArray{
    			pulumi.String("string"),
    		},
    		FileUris: pulumi.StringArray{
    			pulumi.String("string"),
    		},
    		JarFileUris: pulumi.StringArray{
    			pulumi.String("string"),
    		},
    		LoggingConfig: &dataproc.JobPysparkConfigLoggingConfigArgs{
    			DriverLogLevels: pulumi.StringMap{
    				"string": pulumi.String("string"),
    			},
    		},
    		Properties: pulumi.StringMap{
    			"string": pulumi.String("string"),
    		},
    		PythonFileUris: pulumi.StringArray{
    			pulumi.String("string"),
    		},
    	},
    	Reference: &dataproc.JobReferenceArgs{
    		JobId: pulumi.String("string"),
    	},
    	Region: pulumi.String("string"),
    	Scheduling: &dataproc.JobSchedulingArgs{
    		MaxFailuresPerHour: pulumi.Int(0),
    		MaxFailuresTotal:   pulumi.Int(0),
    	},
    	SparkConfig: &dataproc.JobSparkConfigArgs{
    		ArchiveUris: pulumi.StringArray{
    			pulumi.String("string"),
    		},
    		Args: pulumi.StringArray{
    			pulumi.String("string"),
    		},
    		FileUris: pulumi.StringArray{
    			pulumi.String("string"),
    		},
    		JarFileUris: pulumi.StringArray{
    			pulumi.String("string"),
    		},
    		LoggingConfig: &dataproc.JobSparkConfigLoggingConfigArgs{
    			DriverLogLevels: pulumi.StringMap{
    				"string": pulumi.String("string"),
    			},
    		},
    		MainClass:      pulumi.String("string"),
    		MainJarFileUri: pulumi.String("string"),
    		Properties: pulumi.StringMap{
    			"string": pulumi.String("string"),
    		},
    	},
    	SparksqlConfig: &dataproc.JobSparksqlConfigArgs{
    		JarFileUris: pulumi.StringArray{
    			pulumi.String("string"),
    		},
    		LoggingConfig: &dataproc.JobSparksqlConfigLoggingConfigArgs{
    			DriverLogLevels: pulumi.StringMap{
    				"string": pulumi.String("string"),
    			},
    		},
    		Properties: pulumi.StringMap{
    			"string": pulumi.String("string"),
    		},
    		QueryFileUri: pulumi.String("string"),
    		QueryLists: pulumi.StringArray{
    			pulumi.String("string"),
    		},
    		ScriptVariables: pulumi.StringMap{
    			"string": pulumi.String("string"),
    		},
    	},
    })
    
    var examplejobResourceResourceFromDataprocjob = new Job("examplejobResourceResourceFromDataprocjob", JobArgs.builder()        
        .placement(JobPlacementArgs.builder()
            .clusterName("string")
            .clusterUuid("string")
            .build())
        .prestoConfig(JobPrestoConfigArgs.builder()
            .clientTags("string")
            .continueOnFailure(false)
            .loggingConfig(JobPrestoConfigLoggingConfigArgs.builder()
                .driverLogLevels(Map.of("string", "string"))
                .build())
            .outputFormat("string")
            .properties(Map.of("string", "string"))
            .queryFileUri("string")
            .queryLists("string")
            .build())
        .hiveConfig(JobHiveConfigArgs.builder()
            .continueOnFailure(false)
            .jarFileUris("string")
            .properties(Map.of("string", "string"))
            .queryFileUri("string")
            .queryLists("string")
            .scriptVariables(Map.of("string", "string"))
            .build())
        .labels(Map.of("string", "string"))
        .pigConfig(JobPigConfigArgs.builder()
            .continueOnFailure(false)
            .jarFileUris("string")
            .loggingConfig(JobPigConfigLoggingConfigArgs.builder()
                .driverLogLevels(Map.of("string", "string"))
                .build())
            .properties(Map.of("string", "string"))
            .queryFileUri("string")
            .queryLists("string")
            .scriptVariables(Map.of("string", "string"))
            .build())
        .hadoopConfig(JobHadoopConfigArgs.builder()
            .archiveUris("string")
            .args("string")
            .fileUris("string")
            .jarFileUris("string")
            .loggingConfig(JobHadoopConfigLoggingConfigArgs.builder()
                .driverLogLevels(Map.of("string", "string"))
                .build())
            .mainClass("string")
            .mainJarFileUri("string")
            .properties(Map.of("string", "string"))
            .build())
        .forceDelete(false)
        .project("string")
        .pysparkConfig(JobPysparkConfigArgs.builder()
            .mainPythonFileUri("string")
            .archiveUris("string")
            .args("string")
            .fileUris("string")
            .jarFileUris("string")
            .loggingConfig(JobPysparkConfigLoggingConfigArgs.builder()
                .driverLogLevels(Map.of("string", "string"))
                .build())
            .properties(Map.of("string", "string"))
            .pythonFileUris("string")
            .build())
        .reference(JobReferenceArgs.builder()
            .jobId("string")
            .build())
        .region("string")
        .scheduling(JobSchedulingArgs.builder()
            .maxFailuresPerHour(0)
            .maxFailuresTotal(0)
            .build())
        .sparkConfig(JobSparkConfigArgs.builder()
            .archiveUris("string")
            .args("string")
            .fileUris("string")
            .jarFileUris("string")
            .loggingConfig(JobSparkConfigLoggingConfigArgs.builder()
                .driverLogLevels(Map.of("string", "string"))
                .build())
            .mainClass("string")
            .mainJarFileUri("string")
            .properties(Map.of("string", "string"))
            .build())
        .sparksqlConfig(JobSparksqlConfigArgs.builder()
            .jarFileUris("string")
            .loggingConfig(JobSparksqlConfigLoggingConfigArgs.builder()
                .driverLogLevels(Map.of("string", "string"))
                .build())
            .properties(Map.of("string", "string"))
            .queryFileUri("string")
            .queryLists("string")
            .scriptVariables(Map.of("string", "string"))
            .build())
        .build());
    
    examplejob_resource_resource_from_dataprocjob = gcp.dataproc.Job("examplejobResourceResourceFromDataprocjob",
        placement=gcp.dataproc.JobPlacementArgs(
            cluster_name="string",
            cluster_uuid="string",
        ),
        presto_config=gcp.dataproc.JobPrestoConfigArgs(
            client_tags=["string"],
            continue_on_failure=False,
            logging_config=gcp.dataproc.JobPrestoConfigLoggingConfigArgs(
                driver_log_levels={
                    "string": "string",
                },
            ),
            output_format="string",
            properties={
                "string": "string",
            },
            query_file_uri="string",
            query_lists=["string"],
        ),
        hive_config=gcp.dataproc.JobHiveConfigArgs(
            continue_on_failure=False,
            jar_file_uris=["string"],
            properties={
                "string": "string",
            },
            query_file_uri="string",
            query_lists=["string"],
            script_variables={
                "string": "string",
            },
        ),
        labels={
            "string": "string",
        },
        pig_config=gcp.dataproc.JobPigConfigArgs(
            continue_on_failure=False,
            jar_file_uris=["string"],
            logging_config=gcp.dataproc.JobPigConfigLoggingConfigArgs(
                driver_log_levels={
                    "string": "string",
                },
            ),
            properties={
                "string": "string",
            },
            query_file_uri="string",
            query_lists=["string"],
            script_variables={
                "string": "string",
            },
        ),
        hadoop_config=gcp.dataproc.JobHadoopConfigArgs(
            archive_uris=["string"],
            args=["string"],
            file_uris=["string"],
            jar_file_uris=["string"],
            logging_config=gcp.dataproc.JobHadoopConfigLoggingConfigArgs(
                driver_log_levels={
                    "string": "string",
                },
            ),
            main_class="string",
            main_jar_file_uri="string",
            properties={
                "string": "string",
            },
        ),
        force_delete=False,
        project="string",
        pyspark_config=gcp.dataproc.JobPysparkConfigArgs(
            main_python_file_uri="string",
            archive_uris=["string"],
            args=["string"],
            file_uris=["string"],
            jar_file_uris=["string"],
            logging_config=gcp.dataproc.JobPysparkConfigLoggingConfigArgs(
                driver_log_levels={
                    "string": "string",
                },
            ),
            properties={
                "string": "string",
            },
            python_file_uris=["string"],
        ),
        reference=gcp.dataproc.JobReferenceArgs(
            job_id="string",
        ),
        region="string",
        scheduling=gcp.dataproc.JobSchedulingArgs(
            max_failures_per_hour=0,
            max_failures_total=0,
        ),
        spark_config=gcp.dataproc.JobSparkConfigArgs(
            archive_uris=["string"],
            args=["string"],
            file_uris=["string"],
            jar_file_uris=["string"],
            logging_config=gcp.dataproc.JobSparkConfigLoggingConfigArgs(
                driver_log_levels={
                    "string": "string",
                },
            ),
            main_class="string",
            main_jar_file_uri="string",
            properties={
                "string": "string",
            },
        ),
        sparksql_config=gcp.dataproc.JobSparksqlConfigArgs(
            jar_file_uris=["string"],
            logging_config=gcp.dataproc.JobSparksqlConfigLoggingConfigArgs(
                driver_log_levels={
                    "string": "string",
                },
            ),
            properties={
                "string": "string",
            },
            query_file_uri="string",
            query_lists=["string"],
            script_variables={
                "string": "string",
            },
        ))
    
    const examplejobResourceResourceFromDataprocjob = new gcp.dataproc.Job("examplejobResourceResourceFromDataprocjob", {
        placement: {
            clusterName: "string",
            clusterUuid: "string",
        },
        prestoConfig: {
            clientTags: ["string"],
            continueOnFailure: false,
            loggingConfig: {
                driverLogLevels: {
                    string: "string",
                },
            },
            outputFormat: "string",
            properties: {
                string: "string",
            },
            queryFileUri: "string",
            queryLists: ["string"],
        },
        hiveConfig: {
            continueOnFailure: false,
            jarFileUris: ["string"],
            properties: {
                string: "string",
            },
            queryFileUri: "string",
            queryLists: ["string"],
            scriptVariables: {
                string: "string",
            },
        },
        labels: {
            string: "string",
        },
        pigConfig: {
            continueOnFailure: false,
            jarFileUris: ["string"],
            loggingConfig: {
                driverLogLevels: {
                    string: "string",
                },
            },
            properties: {
                string: "string",
            },
            queryFileUri: "string",
            queryLists: ["string"],
            scriptVariables: {
                string: "string",
            },
        },
        hadoopConfig: {
            archiveUris: ["string"],
            args: ["string"],
            fileUris: ["string"],
            jarFileUris: ["string"],
            loggingConfig: {
                driverLogLevels: {
                    string: "string",
                },
            },
            mainClass: "string",
            mainJarFileUri: "string",
            properties: {
                string: "string",
            },
        },
        forceDelete: false,
        project: "string",
        pysparkConfig: {
            mainPythonFileUri: "string",
            archiveUris: ["string"],
            args: ["string"],
            fileUris: ["string"],
            jarFileUris: ["string"],
            loggingConfig: {
                driverLogLevels: {
                    string: "string",
                },
            },
            properties: {
                string: "string",
            },
            pythonFileUris: ["string"],
        },
        reference: {
            jobId: "string",
        },
        region: "string",
        scheduling: {
            maxFailuresPerHour: 0,
            maxFailuresTotal: 0,
        },
        sparkConfig: {
            archiveUris: ["string"],
            args: ["string"],
            fileUris: ["string"],
            jarFileUris: ["string"],
            loggingConfig: {
                driverLogLevels: {
                    string: "string",
                },
            },
            mainClass: "string",
            mainJarFileUri: "string",
            properties: {
                string: "string",
            },
        },
        sparksqlConfig: {
            jarFileUris: ["string"],
            loggingConfig: {
                driverLogLevels: {
                    string: "string",
                },
            },
            properties: {
                string: "string",
            },
            queryFileUri: "string",
            queryLists: ["string"],
            scriptVariables: {
                string: "string",
            },
        },
    });
    
    type: gcp:dataproc:Job
    properties:
        forceDelete: false
        hadoopConfig:
            archiveUris:
                - string
            args:
                - string
            fileUris:
                - string
            jarFileUris:
                - string
            loggingConfig:
                driverLogLevels:
                    string: string
            mainClass: string
            mainJarFileUri: string
            properties:
                string: string
        hiveConfig:
            continueOnFailure: false
            jarFileUris:
                - string
            properties:
                string: string
            queryFileUri: string
            queryLists:
                - string
            scriptVariables:
                string: string
        labels:
            string: string
        pigConfig:
            continueOnFailure: false
            jarFileUris:
                - string
            loggingConfig:
                driverLogLevels:
                    string: string
            properties:
                string: string
            queryFileUri: string
            queryLists:
                - string
            scriptVariables:
                string: string
        placement:
            clusterName: string
            clusterUuid: string
        prestoConfig:
            clientTags:
                - string
            continueOnFailure: false
            loggingConfig:
                driverLogLevels:
                    string: string
            outputFormat: string
            properties:
                string: string
            queryFileUri: string
            queryLists:
                - string
        project: string
        pysparkConfig:
            archiveUris:
                - string
            args:
                - string
            fileUris:
                - string
            jarFileUris:
                - string
            loggingConfig:
                driverLogLevels:
                    string: string
            mainPythonFileUri: string
            properties:
                string: string
            pythonFileUris:
                - string
        reference:
            jobId: string
        region: string
        scheduling:
            maxFailuresPerHour: 0
            maxFailuresTotal: 0
        sparkConfig:
            archiveUris:
                - string
            args:
                - string
            fileUris:
                - string
            jarFileUris:
                - string
            loggingConfig:
                driverLogLevels:
                    string: string
            mainClass: string
            mainJarFileUri: string
            properties:
                string: string
        sparksqlConfig:
            jarFileUris:
                - string
            loggingConfig:
                driverLogLevels:
                    string: string
            properties:
                string: string
            queryFileUri: string
            queryLists:
                - string
            scriptVariables:
                string: string
    

    Job Resource Properties

    To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

    Inputs

    The Job resource accepts the following input properties (a short combined usage sketch follows the list):

    C#
    Placement JobPlacement
    The config of job placement.
    ForceDelete bool
    By default, you can only delete inactive jobs within Dataproc. Setting this to true, and calling destroy, will ensure that the job is first cancelled before issuing the delete.
    HadoopConfig JobHadoopConfig
    The config of Hadoop job
    HiveConfig JobHiveConfig
    The config of hive job
    Labels Dictionary<string, string>
    The list of labels (key/value pairs) to add to the job. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field 'effective_labels' for all of the labels present on the resource.
    PigConfig JobPigConfig
    The config of pig job.
    PrestoConfig JobPrestoConfig
    The config of presto job
    Project string
    The project in which the cluster can be found and jobs subsequently run against. If it is not provided, the provider project is used.
    PysparkConfig JobPysparkConfig
    The config of pySpark job.
    Reference JobReference
    The reference of the job
    Region string
    The Cloud Dataproc region. This essentially determines which clusters are available for this job to be submitted to. If not specified, defaults to global.
    Scheduling JobScheduling
    Optional. Job scheduling configuration.
    SparkConfig JobSparkConfig
    The config of the Spark job.
    SparksqlConfig JobSparksqlConfig
    The config of SparkSql job

    Go
    Placement JobPlacementArgs
    The config of job placement.
    ForceDelete bool
    By default, you can only delete inactive jobs within Dataproc. Setting this to true, and calling destroy, will ensure that the job is first cancelled before issuing the delete.
    HadoopConfig JobHadoopConfigArgs
    The config of Hadoop job
    HiveConfig JobHiveConfigArgs
    The config of hive job
    Labels map[string]string
    The list of labels (key/value pairs) to add to the job. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field 'effective_labels' for all of the labels present on the resource.
    PigConfig JobPigConfigArgs
    The config of pig job.
    PrestoConfig JobPrestoConfigArgs
    The config of presto job
    Project string
    The project in which the cluster can be found and jobs subsequently run against. If it is not provided, the provider project is used.
    PysparkConfig JobPysparkConfigArgs
    The config of pySpark job.
    Reference JobReferenceArgs
    The reference of the job
    Region string
    The Cloud Dataproc region. This essentially determines which clusters are available for this job to be submitted to. If not specified, defaults to global.
    Scheduling JobSchedulingArgs
    Optional. Job scheduling configuration.
    SparkConfig JobSparkConfigArgs
    The config of the Spark job.
    SparksqlConfig JobSparksqlConfigArgs
    The config of SparkSql job

    Java
    placement JobPlacement
    The config of job placement.
    forceDelete Boolean
    By default, you can only delete inactive jobs within Dataproc. Setting this to true, and calling destroy, will ensure that the job is first cancelled before issuing the delete.
    hadoopConfig JobHadoopConfig
    The config of Hadoop job
    hiveConfig JobHiveConfig
    The config of hive job
    labels Map<String,String>
    The list of labels (key/value pairs) to add to the job. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field 'effective_labels' for all of the labels present on the resource.
    pigConfig JobPigConfig
    The config of pig job.
    prestoConfig JobPrestoConfig
    The config of presto job
    project String
    The project in which the cluster can be found and jobs subsequently run against. If it is not provided, the provider project is used.
    pysparkConfig JobPysparkConfig
    The config of pySpark job.
    reference JobReference
    The reference of the job
    region String
    The Cloud Dataproc region. This essentially determines which clusters are available for this job to be submitted to. If not specified, defaults to global.
    scheduling JobScheduling
    Optional. Job scheduling configuration.
    sparkConfig JobSparkConfig
    The config of the Spark job.
    sparksqlConfig JobSparksqlConfig
    The config of SparkSql job

    TypeScript / JavaScript
    placement JobPlacement
    The config of job placement.
    forceDelete boolean
    By default, you can only delete inactive jobs within Dataproc. Setting this to true, and calling destroy, will ensure that the job is first cancelled before issuing the delete.
    hadoopConfig JobHadoopConfig
    The config of Hadoop job
    hiveConfig JobHiveConfig
    The config of hive job
    labels {[key: string]: string}
    The list of labels (key/value pairs) to add to the job. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field 'effective_labels' for all of the labels present on the resource.
    pigConfig JobPigConfig
    The config of pig job.
    prestoConfig JobPrestoConfig
    The config of presto job
    project string
    The project in which the cluster can be found and jobs subsequently run against. If it is not provided, the provider project is used.
    pysparkConfig JobPysparkConfig
    The config of pySpark job.
    reference JobReference
    The reference of the job
    region string
    The Cloud Dataproc region. This essentially determines which clusters are available for this job to be submitted to. If not specified, defaults to global.
    scheduling JobScheduling
    Optional. Job scheduling configuration.
    sparkConfig JobSparkConfig
    The config of the Spark job.
    sparksqlConfig JobSparksqlConfig
    The config of SparkSql job

    Python
    placement JobPlacementArgs
    The config of job placement.
    force_delete bool
    By default, you can only delete inactive jobs within Dataproc. Setting this to true, and calling destroy, will ensure that the job is first cancelled before issuing the delete.
    hadoop_config JobHadoopConfigArgs
    The config of Hadoop job
    hive_config JobHiveConfigArgs
    The config of hive job
    labels Mapping[str, str]
    The list of labels (key/value pairs) to add to the job. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field 'effective_labels' for all of the labels present on the resource.
    pig_config JobPigConfigArgs
    The config of pig job.
    presto_config JobPrestoConfigArgs
    The config of presto job
    project str
    The project in which the cluster can be found and jobs subsequently run against. If it is not provided, the provider project is used.
    pyspark_config JobPysparkConfigArgs
    The config of pySpark job.
    reference JobReferenceArgs
    The reference of the job
    region str
    The Cloud Dataproc region. This essentially determines which clusters are available for this job to be submitted to. If not specified, defaults to global.
    scheduling JobSchedulingArgs
    Optional. Job scheduling configuration.
    spark_config JobSparkConfigArgs
    The config of the Spark job.
    sparksql_config JobSparksqlConfigArgs
    The config of SparkSql job

    YAML
    placement Property Map
    The config of job placement.
    forceDelete Boolean
    By default, you can only delete inactive jobs within Dataproc. Setting this to true, and calling destroy, will ensure that the job is first cancelled before issuing the delete.
    hadoopConfig Property Map
    The config of Hadoop job
    hiveConfig Property Map
    The config of hive job
    labels Map<String>
    The list of labels (key/value pairs) to add to the job. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field 'effective_labels' for all of the labels present on the resource.
    pigConfig Property Map
    The config of pig job.
    prestoConfig Property Map
    The config of presto job
    project String
    The project in which the cluster can be found and jobs subsequently run against. If it is not provided, the provider project is used.
    pysparkConfig Property Map
    The config of pySpark job.
    reference Property Map
    The reference of the job
    region String
    The Cloud Dataproc region. This essentially determines which clusters are available for this job to be submitted to. If not specified, defaults to global.
    scheduling Property Map
    Optional. Job scheduling configuration.
    sparkConfig Property Map
    The config of the Spark job.
    sparksqlConfig Property Map
    The config of SparkSql job
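
    Several of these inputs (labels, reference, and scheduling) do not appear in the Example Usage above. The following is a minimal TypeScript sketch combining them; the cluster name, job ID, and retry limits are illustrative assumptions, and both scheduling limits are set since each is documented as required.

    import * as gcp from "@pulumi/gcp";

    const scheduled = new gcp.dataproc.Job("scheduled", {
        region: "us-central1",                        // assumed region
        placement: { clusterName: "my-cluster" },     // assumed existing cluster
        reference: { jobId: "nightly-sparkpi-0001" }, // explicit job ID (assumed value)
        labels: { team: "data-eng" },
        scheduling: {
            maxFailuresPerHour: 1,
            maxFailuresTotal: 5,
        },
        sparkConfig: {
            mainClass: "org.apache.spark.examples.SparkPi",
            jarFileUris: ["file:///usr/lib/spark/examples/jars/spark-examples.jar"],
        },
    });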

    Outputs

    All input properties are implicitly available as output properties. Additionally, the Job resource produces the following output properties (a brief export sketch follows the list):

    C#
    DriverControlsFilesUri string
    If present, the location of miscellaneous control files which may be used as part of job setup and handling. If not present, control files may be placed in the same location as driver_output_uri.
    DriverOutputResourceUri string
    A URI pointing to the location of the stdout of the job's driver program.
    EffectiveLabels Dictionary<string, string>

    All of labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.

    • scheduling.max_failures_per_hour - (Required) Maximum number of times per hour a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed.

    • scheduling.max_failures_total - (Required) Maximum number of times in total a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed.

    Id string
    The provider-assigned unique ID for this managed resource.
    PulumiLabels Dictionary<string, string>
    The combination of labels configured directly on the resource and default labels configured on the provider.
    Statuses List<JobStatus>
    The status of the job.

    Go
    DriverControlsFilesUri string
    If present, the location of miscellaneous control files which may be used as part of job setup and handling. If not present, control files may be placed in the same location as driver_output_uri.
    DriverOutputResourceUri string
    A URI pointing to the location of the stdout of the job's driver program.
    EffectiveLabels map[string]string

    All of labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.

    • scheduling.max_failures_per_hour - (Required) Maximum number of times per hour a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed.

    • scheduling.max_failures_total - (Required) Maximum number of times in total a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed.

    Id string
    The provider-assigned unique ID for this managed resource.
    PulumiLabels map[string]string
    The combination of labels configured directly on the resource and default labels configured on the provider.
    Statuses []JobStatus
    The status of the job.

    Java
    driverControlsFilesUri String
    If present, the location of miscellaneous control files which may be used as part of job setup and handling. If not present, control files may be placed in the same location as driver_output_uri.
    driverOutputResourceUri String
    A URI pointing to the location of the stdout of the job's driver program.
    effectiveLabels Map<String,String>

    All of labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.

    • scheduling.max_failures_per_hour - (Required) Maximum number of times per hour a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed.

    • scheduling.max_failures_total - (Required) Maximum number of times in total a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed.

    id String
    The provider-assigned unique ID for this managed resource.
    pulumiLabels Map<String,String>
    The combination of labels configured directly on the resource and default labels configured on the provider.
    statuses List<JobStatus>
    The status of the job.

    TypeScript / JavaScript
    driverControlsFilesUri string
    If present, the location of miscellaneous control files which may be used as part of job setup and handling. If not present, control files may be placed in the same location as driver_output_uri.
    driverOutputResourceUri string
    A URI pointing to the location of the stdout of the job's driver program.
    effectiveLabels {[key: string]: string}

    All of labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.

    • scheduling.max_failures_per_hour - (Required) Maximum number of times per hour a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed.

    • scheduling.max_failures_total - (Required) Maximum number of times in total a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed.

    id string
    The provider-assigned unique ID for this managed resource.
    pulumiLabels {[key: string]: string}
    The combination of labels configured directly on the resource and default labels configured on the provider.
    statuses JobStatus[]
    The status of the job.

    Python
    driver_controls_files_uri str
    If present, the location of miscellaneous control files which may be used as part of job setup and handling. If not present, control files may be placed in the same location as driver_output_uri.
    driver_output_resource_uri str
    A URI pointing to the location of the stdout of the job's driver program.
    effective_labels Mapping[str, str]

    All of labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.

    • scheduling.max_failures_per_hour - (Required) Maximum number of times per hour a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed.

    • scheduling.max_failures_total - (Required) Maximum number of times in total a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed.

    id str
    The provider-assigned unique ID for this managed resource.
    pulumi_labels Mapping[str, str]
    The combination of labels configured directly on the resource and default labels configured on the provider.
    statuses Sequence[JobStatus]
    The status of the job.
    driverControlsFilesUri String
    If present, the location of miscellaneous control files which may be used as part of job setup and handling. If not present, control files may be placed in the same location as driver_output_uri.
    driverOutputResourceUri String
    A URI pointing to the location of the stdout of the job's driver program.
    effectiveLabels Map<String>

    All of labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.

    • scheduling.max_failures_per_hour - (Required) Maximum number of times per hour a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed.

    • scheduling.max_failures_total - (Required) Maximum number of times in total a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed.

    id String
    The provider-assigned unique ID for this managed resource.
    pulumiLabels Map<String>
    The combination of labels configured directly on the resource and default labels configured on the provider.
    statuses List<Property Map>
    The status of the job.
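
    The driver URIs and statuses above are output-only: the Dataproc service populates them after the job is submitted, and they can be read or exported like any other output. A minimal TypeScript sketch, assuming a pre-existing cluster whose name ("my-existing-cluster") is a placeholder:

    import * as pulumi from "@pulumi/pulumi";
    import * as gcp from "@pulumi/gcp";

    // Hypothetical Hive job submitted to an assumed, pre-existing cluster.
    const job = new gcp.dataproc.Job("example", {
        region: "us-central1",
        placement: { clusterName: "my-existing-cluster" }, // placeholder name
        hiveConfig: { queryLists: ["SHOW DATABASES"] },
    });

    // Output-only properties populated by the service after submission.
    export const driverOutputUri = job.driverOutputResourceUri;
    export const latestState = job.statuses.apply(s => s[0].state);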

    Look up Existing Job Resource

    Get an existing Job resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

    public static get(name: string, id: Input<ID>, state?: JobState, opts?: CustomResourceOptions): Job
    @staticmethod
    def get(resource_name: str,
            id: str,
            opts: Optional[ResourceOptions] = None,
            driver_controls_files_uri: Optional[str] = None,
            driver_output_resource_uri: Optional[str] = None,
            effective_labels: Optional[Mapping[str, str]] = None,
            force_delete: Optional[bool] = None,
            hadoop_config: Optional[JobHadoopConfigArgs] = None,
            hive_config: Optional[JobHiveConfigArgs] = None,
            labels: Optional[Mapping[str, str]] = None,
            pig_config: Optional[JobPigConfigArgs] = None,
            placement: Optional[JobPlacementArgs] = None,
            presto_config: Optional[JobPrestoConfigArgs] = None,
            project: Optional[str] = None,
            pulumi_labels: Optional[Mapping[str, str]] = None,
            pyspark_config: Optional[JobPysparkConfigArgs] = None,
            reference: Optional[JobReferenceArgs] = None,
            region: Optional[str] = None,
            scheduling: Optional[JobSchedulingArgs] = None,
            spark_config: Optional[JobSparkConfigArgs] = None,
            sparksql_config: Optional[JobSparksqlConfigArgs] = None,
            statuses: Optional[Sequence[JobStatusArgs]] = None) -> Job
    func GetJob(ctx *Context, name string, id IDInput, state *JobState, opts ...ResourceOption) (*Job, error)
    public static Job Get(string name, Input<string> id, JobState? state, CustomResourceOptions? opts = null)
    public static Job get(String name, Output<String> id, JobState state, CustomResourceOptions options)
    Resource lookup is not supported in YAML
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to look up.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    resource_name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to look up.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to look up.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to look up.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to look up.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
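
    A minimal TypeScript sketch of the lookup, assuming the provider-assigned job ID is already known (the ID value below is an illustrative placeholder, not a real job):

    import * as gcp from "@pulumi/gcp";

    // ID as reported by Pulumi for a job created elsewhere (placeholder value).
    const existingJobId = "projects/my-project/regions/us-central1/jobs/my-job-id";

    // Returns a Job whose outputs (statuses, driver URIs, ...) can be read;
    // nothing new is created in GCP.
    const existing = gcp.dataproc.Job.get("existing-job", existingJobId);

    export const existingJobState = existing.statuses.apply(s => s[0].state);
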
    The following state arguments are supported:
    DriverControlsFilesUri string
    If present, the location of miscellaneous control files which may be used as part of job setup and handling. If not present, control files may be placed in the same location as driver_output_uri.
    DriverOutputResourceUri string
    A URI pointing to the location of the stdout of the job's driver program.
    EffectiveLabels Dictionary<string, string>

    All of labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.

    • scheduling.max_failures_per_hour - (Required) Maximum number of times per hour a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed.

    • scheduling.max_failures_total - (Required) Maximum number of times in total a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed.

    ForceDelete bool
    By default, you can only delete inactive jobs within Dataproc. Setting this to true, and calling destroy, will ensure that the job is first cancelled before issuing the delete.
    HadoopConfig JobHadoopConfig
    The config of Hadoop job
    HiveConfig JobHiveConfig
    The config of hive job
    Labels Dictionary<string, string>
    The list of labels (key/value pairs) to add to the job. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field 'effective_labels' for all of the labels present on the resource.
    PigConfig JobPigConfig
    The config of the Pig job.
    Placement JobPlacement
    The config of job placement.
    PrestoConfig JobPrestoConfig
    The config of presto job
    Project string
    The project in which the cluster can be found and jobs subsequently run against. If it is not provided, the provider project is used.
    PulumiLabels Dictionary<string, string>
    The combination of labels configured directly on the resource and default labels configured on the provider.
    PysparkConfig JobPysparkConfig
    The config of pySpark job.
    Reference JobReference
    The reference of the job
    Region string
    The Cloud Dataproc region. This essentially determines which clusters are available for this job to be submitted to. If not specified, defaults to global.
    Scheduling JobScheduling
    Optional. Job scheduling configuration.
    SparkConfig JobSparkConfig
    The config of the Spark job.
    SparksqlConfig JobSparksqlConfig
    The config of SparkSql job
    Statuses List<JobStatus>
    The status of the job.
    DriverControlsFilesUri string
    If present, the location of miscellaneous control files which may be used as part of job setup and handling. If not present, control files may be placed in the same location as driver_output_uri.
    DriverOutputResourceUri string
    A URI pointing to the location of the stdout of the job's driver program.
    EffectiveLabels map[string]string

    All of labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.

    • scheduling.max_failures_per_hour - (Required) Maximum number of times per hour a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed.

    • scheduling.max_failures_total - (Required) Maximum number of times in total a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed.

    ForceDelete bool
    By default, you can only delete inactive jobs within Dataproc. Setting this to true, and calling destroy, will ensure that the job is first cancelled before issuing the delete.
    HadoopConfig JobHadoopConfigArgs
    The config of Hadoop job
    HiveConfig JobHiveConfigArgs
    The config of hive job
    Labels map[string]string
    The list of labels (key/value pairs) to add to the job. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field 'effective_labels' for all of the labels present on the resource.
    PigConfig JobPigConfigArgs
    The config of the Pig job.
    Placement JobPlacementArgs
    The config of job placement.
    PrestoConfig JobPrestoConfigArgs
    The config of presto job
    Project string
    The project in which the cluster can be found and jobs subsequently run against. If it is not provided, the provider project is used.
    PulumiLabels map[string]string
    The combination of labels configured directly on the resource and default labels configured on the provider.
    PysparkConfig JobPysparkConfigArgs
    The config of pySpark job.
    Reference JobReferenceArgs
    The reference of the job
    Region string
    The Cloud Dataproc region. This essentially determines which clusters are available for this job to be submitted to. If not specified, defaults to global.
    Scheduling JobSchedulingArgs
    Optional. Job scheduling configuration.
    SparkConfig JobSparkConfigArgs
    The config of the Spark job.
    SparksqlConfig JobSparksqlConfigArgs
    The config of SparkSql job
    Statuses []JobStatusArgs
    The status of the job.
    driverControlsFilesUri String
    If present, the location of miscellaneous control files which may be used as part of job setup and handling. If not present, control files may be placed in the same location as driver_output_uri.
    driverOutputResourceUri String
    A URI pointing to the location of the stdout of the job's driver program.
    effectiveLabels Map<String,String>

    All of labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.

    • scheduling.max_failures_per_hour - (Required) Maximum number of times per hour a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed.

    • scheduling.max_failures_total - (Required) Maximum number of times in total a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed.

    forceDelete Boolean
    By default, you can only delete inactive jobs within Dataproc. Setting this to true, and calling destroy, will ensure that the job is first cancelled before issuing the delete.
    hadoopConfig JobHadoopConfig
    The config of Hadoop job
    hiveConfig JobHiveConfig
    The config of hive job
    labels Map<String,String>
    The list of labels (key/value pairs) to add to the job. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field 'effective_labels' for all of the labels present on the resource.
    pigConfig JobPigConfig
    The config of the Pig job.
    placement JobPlacement
    The config of job placement.
    prestoConfig JobPrestoConfig
    The config of presto job
    project String
    The project in which the cluster can be found and jobs subsequently run against. If it is not provided, the provider project is used.
    pulumiLabels Map<String,String>
    The combination of labels configured directly on the resource and default labels configured on the provider.
    pysparkConfig JobPysparkConfig
    The config of pySpark job.
    reference JobReference
    The reference of the job
    region String
    The Cloud Dataproc region. This essentially determines which clusters are available for this job to be submitted to. If not specified, defaults to global.
    scheduling JobScheduling
    Optional. Job scheduling configuration.
    sparkConfig JobSparkConfig
    The config of the Spark job.
    sparksqlConfig JobSparksqlConfig
    The config of SparkSql job
    statuses List<JobStatus>
    The status of the job.
    driverControlsFilesUri string
    If present, the location of miscellaneous control files which may be used as part of job setup and handling. If not present, control files may be placed in the same location as driver_output_uri.
    driverOutputResourceUri string
    A URI pointing to the location of the stdout of the job's driver program.
    effectiveLabels {[key: string]: string}

    All of labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.

    • scheduling.max_failures_per_hour - (Required) Maximum number of times per hour a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed.

    • scheduling.max_failures_total - (Required) Maximum number of times in total a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed.

    forceDelete boolean
    By default, you can only delete inactive jobs within Dataproc. Setting this to true, and calling destroy, will ensure that the job is first cancelled before issuing the delete.
    hadoopConfig JobHadoopConfig
    The config of Hadoop job
    hiveConfig JobHiveConfig
    The config of hive job
    labels {[key: string]: string}
    The list of labels (key/value pairs) to add to the job. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field 'effective_labels' for all of the labels present on the resource.
    pigConfig JobPigConfig
    The config of the Pig job.
    placement JobPlacement
    The config of job placement.
    prestoConfig JobPrestoConfig
    The config of presto job
    project string
    The project in which the cluster can be found and jobs subsequently run against. If it is not provided, the provider project is used.
    pulumiLabels {[key: string]: string}
    The combination of labels configured directly on the resource and default labels configured on the provider.
    pysparkConfig JobPysparkConfig
    The config of pySpark job.
    reference JobReference
    The reference of the job
    region string
    The Cloud Dataproc region. This essentially determines which clusters are available for this job to be submitted to. If not specified, defaults to global.
    scheduling JobScheduling
    Optional. Job scheduling configuration.
    sparkConfig JobSparkConfig
    The config of the Spark job.
    sparksqlConfig JobSparksqlConfig
    The config of SparkSql job
    statuses JobStatus[]
    The status of the job.
    driver_controls_files_uri str
    If present, the location of miscellaneous control files which may be used as part of job setup and handling. If not present, control files may be placed in the same location as driver_output_uri.
    driver_output_resource_uri str
    A URI pointing to the location of the stdout of the job's driver program.
    effective_labels Mapping[str, str]

    All of labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.

    • scheduling.max_failures_per_hour - (Required) Maximum number of times per hour a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed.

    • scheduling.max_failures_total - (Required) Maximum number of times in total a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed.

    force_delete bool
    By default, you can only delete inactive jobs within Dataproc. Setting this to true, and calling destroy, will ensure that the job is first cancelled before issuing the delete.
    hadoop_config JobHadoopConfigArgs
    The config of Hadoop job
    hive_config JobHiveConfigArgs
    The config of hive job
    labels Mapping[str, str]
    The list of labels (key/value pairs) to add to the job. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field 'effective_labels' for all of the labels present on the resource.
    pig_config JobPigConfigArgs
    The config of the Pig job.
    placement JobPlacementArgs
    The config of job placement.
    presto_config JobPrestoConfigArgs
    The config of presto job
    project str
    The project in which the cluster can be found and jobs subsequently run against. If it is not provided, the provider project is used.
    pulumi_labels Mapping[str, str]
    The combination of labels configured directly on the resource and default labels configured on the provider.
    pyspark_config JobPysparkConfigArgs
    The config of pySpark job.
    reference JobReferenceArgs
    The reference of the job
    region str
    The Cloud Dataproc region. This essentially determines which clusters are available for this job to be submitted to. If not specified, defaults to global.
    scheduling JobSchedulingArgs
    Optional. Job scheduling configuration.
    spark_config JobSparkConfigArgs
    The config of the Spark job.
    sparksql_config JobSparksqlConfigArgs
    The config of SparkSql job
    statuses Sequence[JobStatusArgs]
    The status of the job.
    driverControlsFilesUri String
    If present, the location of miscellaneous control files which may be used as part of job setup and handling. If not present, control files may be placed in the same location as driver_output_uri.
    driverOutputResourceUri String
    A URI pointing to the location of the stdout of the job's driver program.
    effectiveLabels Map<String>

    All of labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.

    • scheduling.max_failures_per_hour - (Required) Maximum number of times per hour a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed.

    • scheduling.max_failures_total - (Required) Maximum number of times in total a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed.

    forceDelete Boolean
    By default, you can only delete inactive jobs within Dataproc. Setting this to true, and calling destroy, will ensure that the job is first cancelled before issuing the delete.
    hadoopConfig Property Map
    The config of Hadoop job
    hiveConfig Property Map
    The config of hive job
    labels Map<String>
    The list of labels (key/value pairs) to add to the job. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field 'effective_labels' for all of the labels present on the resource.
    pigConfig Property Map
    The config of the Pig job.
    placement Property Map
    The config of job placement.
    prestoConfig Property Map
    The config of presto job
    project String
    The project in which the cluster can be found and jobs subsequently run against. If it is not provided, the provider project is used.
    pulumiLabels Map<String>
    The combination of labels configured directly on the resource and default labels configured on the provider.
    pysparkConfig Property Map
    The config of pySpark job.
    reference Property Map
    The reference of the job
    region String
    The Cloud Dataproc region. This essentially determines which clusters are available for this job to be submitted to. If not specified, defaults to global.
    scheduling Property Map
    Optional. Job scheduling configuration.
    sparkConfig Property Map
    The config of the Spark job.
    sparksqlConfig Property Map
    The config of SparkSql job
    statuses List<Property Map>
    The status of the job.
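
    The scheduling.max_failures_per_hour and scheduling.max_failures_total fields called out above correspond to the scheduling block on the resource. A hedged TypeScript sketch, with the cluster name, script URI, and limits chosen purely for illustration:

    import * as gcp from "@pulumi/gcp";

    const restartable = new gcp.dataproc.Job("restartable", {
        region: "us-central1",
        // Allow `pulumi destroy` to cancel the job before deleting it.
        forceDelete: true,
        placement: { clusterName: "my-existing-cluster" }, // assumed placeholder
        scheduling: {
            maxFailuresPerHour: 5,  // driver restarts allowed per hour
            maxFailuresTotal: 10,   // driver restarts allowed in total
        },
        pysparkConfig: {
            mainPythonFileUri: "gs://my-bucket/jobs/main.py", // placeholder script
        },
    });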

    Supporting Types

    JobHadoopConfig, JobHadoopConfigArgs

    ArchiveUris List<string>
    HCFS URIs of archives to be extracted in the working directory of Hadoop drivers and tasks. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
    Args List<string>
    The arguments to pass to the driver. Do not include arguments, such as -libjars or -Dfoo=bar, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
    FileUris List<string>
    HCFS URIs of files to be copied to the working directory of Hadoop drivers and distributed tasks. Useful for naively parallel tasks.
    JarFileUris List<string>
    HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks.
    LoggingConfig JobHadoopConfigLoggingConfig
    The runtime logging config of the job
    MainClass string
    The name of the driver's main class. The jar file containing the class must be in the default CLASSPATH or specified in jar_file_uris. Conflicts with main_jar_file_uri
    MainJarFileUri string
    The HCFS URI of the jar file containing the main class. Examples: 'gs://foo-bucket/analytics-binaries/extract-useful-metrics-mr.jar' 'hdfs:/tmp/test-samples/custom-wordcount.jar' 'file:///home/usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar'. Conflicts with main_class
    Properties Dictionary<string, string>
    A mapping of property names to values, used to configure Hadoop. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site and classes in user code.

    • logging_config.driver_log_levels- (Required) The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
    ArchiveUris []string
    HCFS URIs of archives to be extracted in the working directory of Hadoop drivers and tasks. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
    Args []string
    The arguments to pass to the driver. Do not include arguments, such as -libjars or -Dfoo=bar, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
    FileUris []string
    HCFS URIs of files to be copied to the working directory of Hadoop drivers and distributed tasks. Useful for naively parallel tasks.
    JarFileUris []string
    HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks.
    LoggingConfig JobHadoopConfigLoggingConfig
    The runtime logging config of the job
    MainClass string
    The name of the driver's main class. The jar file containing the class must be in the default CLASSPATH or specified in jar_file_uris. Conflicts with main_jar_file_uri
    MainJarFileUri string
    The HCFS URI of the jar file containing the main class. Examples: 'gs://foo-bucket/analytics-binaries/extract-useful-metrics-mr.jar' 'hdfs:/tmp/test-samples/custom-wordcount.jar' 'file:///home/usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar'. Conflicts with main_class
    Properties map[string]string
    A mapping of property names to values, used to configure Hadoop. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site and classes in user code.

    • logging_config.driver_log_levels- (Required) The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
    archiveUris List<String>
    HCFS URIs of archives to be extracted in the working directory of Hadoop drivers and tasks. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
    args List<String>
    The arguments to pass to the driver. Do not include arguments, such as -libjars or -Dfoo=bar, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
    fileUris List<String>
    HCFS URIs of files to be copied to the working directory of Hadoop drivers and distributed tasks. Useful for naively parallel tasks.
    jarFileUris List<String>
    HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks.
    loggingConfig JobHadoopConfigLoggingConfig
    The runtime logging config of the job
    mainClass String
    The name of the driver's main class. The jar file containing the class must be in the default CLASSPATH or specified in jar_file_uris. Conflicts with main_jar_file_uri
    mainJarFileUri String
    The HCFS URI of the jar file containing the main class. Examples: 'gs://foo-bucket/analytics-binaries/extract-useful-metrics-mr.jar' 'hdfs:/tmp/test-samples/custom-wordcount.jar' 'file:///home/usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar'. Conflicts with main_class
    properties Map<String,String>
    A mapping of property names to values, used to configure Hadoop. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site and classes in user code.

    • logging_config.driver_log_levels- (Required) The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
    archiveUris string[]
    HCFS URIs of archives to be extracted in the working directory of Hadoop drivers and tasks. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
    args string[]
    The arguments to pass to the driver. Do not include arguments, such as -libjars or -Dfoo=bar, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
    fileUris string[]
    HCFS URIs of files to be copied to the working directory of Hadoop drivers and distributed tasks. Useful for naively parallel tasks.
    jarFileUris string[]
    HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks.
    loggingConfig JobHadoopConfigLoggingConfig
    The runtime logging config of the job
    mainClass string
    The name of the driver's main class. The jar file containing the class must be in the default CLASSPATH or specified in jar_file_uris. Conflicts with main_jar_file_uri
    mainJarFileUri string
    The HCFS URI of the jar file containing the main class. Examples: 'gs://foo-bucket/analytics-binaries/extract-useful-metrics-mr.jar' 'hdfs:/tmp/test-samples/custom-wordcount.jar' 'file:///home/usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar'. Conflicts with main_class
    properties {[key: string]: string}
    A mapping of property names to values, used to configure Hadoop. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site and classes in user code.

    • logging_config.driver_log_levels- (Required) The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
    archive_uris Sequence[str]
    HCFS URIs of archives to be extracted in the working directory of Hadoop drivers and tasks. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
    args Sequence[str]
    The arguments to pass to the driver. Do not include arguments, such as -libjars or -Dfoo=bar, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
    file_uris Sequence[str]
    HCFS URIs of files to be copied to the working directory of Hadoop drivers and distributed tasks. Useful for naively parallel tasks.
    jar_file_uris Sequence[str]
    HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks.
    logging_config JobHadoopConfigLoggingConfig
    The runtime logging config of the job
    main_class str
    The name of the driver's main class. The jar file containing the class must be in the default CLASSPATH or specified in jar_file_uris. Conflicts with main_jar_file_uri
    main_jar_file_uri str
    The HCFS URI of the jar file containing the main class. Examples: 'gs://foo-bucket/analytics-binaries/extract-useful-metrics-mr.jar' 'hdfs:/tmp/test-samples/custom-wordcount.jar' 'file:///home/usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar'. Conflicts with main_class
    properties Mapping[str, str]
    A mapping of property names to values, used to configure Hadoop. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site and classes in user code.

    • logging_config.driver_log_levels- (Required) The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
    archiveUris List<String>
    HCFS URIs of archives to be extracted in the working directory of Hadoop drivers and tasks. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
    args List<String>
    The arguments to pass to the driver. Do not include arguments, such as -libjars or -Dfoo=bar, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
    fileUris List<String>
    HCFS URIs of files to be copied to the working directory of Hadoop drivers and distributed tasks. Useful for naively parallel tasks.
    jarFileUris List<String>
    HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks.
    loggingConfig Property Map
    The runtime logging config of the job
    mainClass String
    The name of the driver's main class. The jar file containing the class must be in the default CLASSPATH or specified in jar_file_uris. Conflicts with main_jar_file_uri
    mainJarFileUri String
    The HCFS URI of the jar file containing the main class. Examples: 'gs://foo-bucket/analytics-binaries/extract-useful-metrics-mr.jar' 'hdfs:/tmp/test-samples/custom-wordcount.jar' 'file:///home/usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar'. Conflicts with main_class
    properties Map<String>
    A mapping of property names to values, used to configure Hadoop. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site and classes in user code.

    • logging_config.driver_log_levels- (Required) The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
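
    As a short illustration of how these fields combine, the following TypeScript sketch submits the wordcount example from the MapReduce examples jar that ships on Dataproc images; the cluster name and output bucket are assumed placeholders:

    import * as gcp from "@pulumi/gcp";

    const hadoop = new gcp.dataproc.Job("hadoop", {
        region: "us-central1",
        placement: { clusterName: "my-existing-cluster" }, // assumed placeholder
        hadoopConfig: {
            // Examples jar preinstalled on Dataproc images; conflicts with mainClass.
            mainJarFileUri: "file:///usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar",
            args: [
                "wordcount",
                "file:///usr/lib/spark/NOTICE",
                "gs://my-bucket/hadoopjob_output", // placeholder output location
            ],
            loggingConfig: {
                driverLogLevels: { root: "INFO" },
            },
        },
    });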

    JobHadoopConfigLoggingConfig, JobHadoopConfigLoggingConfigArgs

    DriverLogLevels Dictionary<string, string>
    Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
    DriverLogLevels map[string]string
    Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
    driverLogLevels Map<String,String>
    Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
    driverLogLevels {[key: string]: string}
    Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
    driver_log_levels Mapping[str, str]
    Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
    driverLogLevels Map<String>
    Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.

    JobHiveConfig, JobHiveConfigArgs

    ContinueOnFailure bool
    Whether to continue executing queries if a query fails. Setting to true can be useful when executing independent parallel queries. Defaults to false.
    JarFileUris List<string>
    HCFS URIs of jar files to add to the CLASSPATH of the Hive server and Hadoop MapReduce (MR) tasks. Can contain Hive SerDes and UDFs.
    Properties Dictionary<string, string>
    A mapping of property names and values, used to configure Hive. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, /etc/hive/conf/hive-site.xml, and classes in user code.
    QueryFileUri string
    HCFS URI of file containing Hive script to execute as the job. Conflicts with query_list
    QueryLists List<string>
    The list of Hive queries or statements to execute as part of the job. Conflicts with query_file_uri
    ScriptVariables Dictionary<string, string>
    Mapping of query variable names to values (equivalent to the Hive command: SET name="value";).
    ContinueOnFailure bool
    Whether to continue executing queries if a query fails. Setting to true can be useful when executing independent parallel queries. Defaults to false.
    JarFileUris []string
    HCFS URIs of jar files to add to the CLASSPATH of the Hive server and Hadoop MapReduce (MR) tasks. Can contain Hive SerDes and UDFs.
    Properties map[string]string
    A mapping of property names and values, used to configure Hive. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, /etc/hive/conf/hive-site.xml, and classes in user code.
    QueryFileUri string
    HCFS URI of file containing Hive script to execute as the job. Conflicts with query_list
    QueryLists []string
    The list of Hive queries or statements to execute as part of the job. Conflicts with query_file_uri
    ScriptVariables map[string]string
    Mapping of query variable names to values (equivalent to the Hive command: SET name="value";).
    continueOnFailure Boolean
    Whether to continue executing queries if a query fails. Setting to true can be useful when executing independent parallel queries. Defaults to false.
    jarFileUris List<String>
    HCFS URIs of jar files to add to the CLASSPATH of the Hive server and Hadoop MapReduce (MR) tasks. Can contain Hive SerDes and UDFs.
    properties Map<String,String>
    A mapping of property names and values, used to configure Hive. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, /etc/hive/conf/hive-site.xml, and classes in user code.
    queryFileUri String
    HCFS URI of file containing Hive script to execute as the job. Conflicts with query_list
    queryLists List<String>
    The list of Hive queries or statements to execute as part of the job. Conflicts with query_file_uri
    scriptVariables Map<String,String>
    Mapping of query variable names to values (equivalent to the Hive command: SET name="value";).
    continueOnFailure boolean
    Whether to continue executing queries if a query fails. Setting to true can be useful when executing independent parallel queries. Defaults to false.
    jarFileUris string[]
    HCFS URIs of jar files to add to the CLASSPATH of the Hive server and Hadoop MapReduce (MR) tasks. Can contain Hive SerDes and UDFs.
    properties {[key: string]: string}
    A mapping of property names and values, used to configure Hive. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, /etc/hive/conf/hive-site.xml, and classes in user code.
    queryFileUri string
    HCFS URI of file containing Hive script to execute as the job. Conflicts with query_list
    queryLists string[]
    The list of Hive queries or statements to execute as part of the job. Conflicts with query_file_uri
    scriptVariables {[key: string]: string}
    Mapping of query variable names to values (equivalent to the Hive command: SET name="value";).
    continue_on_failure bool
    Whether to continue executing queries if a query fails. Setting to true can be useful when executing independent parallel queries. Defaults to false.
    jar_file_uris Sequence[str]
    HCFS URIs of jar files to add to the CLASSPATH of the Hive server and Hadoop MapReduce (MR) tasks. Can contain Hive SerDes and UDFs.
    properties Mapping[str, str]
    A mapping of property names and values, used to configure Hive. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, /etc/hive/conf/hive-site.xml, and classes in user code.
    query_file_uri str
    HCFS URI of file containing Hive script to execute as the job. Conflicts with query_list
    query_lists Sequence[str]
    The list of Hive queries or statements to execute as part of the job. Conflicts with query_file_uri
    script_variables Mapping[str, str]
    Mapping of query variable names to values (equivalent to the Hive command: SET name="value";).
    continueOnFailure Boolean
    Whether to continue executing queries if a query fails. Setting to true can be useful when executing independent parallel queries. Defaults to false.
    jarFileUris List<String>
    HCFS URIs of jar files to add to the CLASSPATH of the Hive server and Hadoop MapReduce (MR) tasks. Can contain Hive SerDes and UDFs.
    properties Map<String>
    A mapping of property names and values, used to configure Hive. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, /etc/hive/conf/hive-site.xml, and classes in user code.
    queryFileUri String
    HCFS URI of file containing Hive script to execute as the job. Conflicts with query_list
    queryLists List<String>
    The list of Hive queries or statements to execute as part of the job. Conflicts with query_file_uri
    scriptVariables Map<String>
    Mapping of query variable names to values (equivalent to the Hive command: SET name="value";).
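
    A minimal TypeScript sketch of a Hive job driven by query_lists; the cluster name, table name, and bucket path are assumed placeholders:

    import * as gcp from "@pulumi/gcp";

    const hive = new gcp.dataproc.Job("hive", {
        region: "us-central1",
        placement: { clusterName: "my-existing-cluster" }, // assumed placeholder
        hiveConfig: {
            queryLists: [
                "DROP TABLE IF EXISTS dprocjob_test",
                "CREATE EXTERNAL TABLE dprocjob_test(bar int) LOCATION 'gs://my-bucket/hive_input/'",
                "SELECT * FROM dprocjob_test WHERE bar > 2",
            ],
            continueOnFailure: false, // stop at the first failing statement
        },
    });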

    JobPigConfig, JobPigConfigArgs

    ContinueOnFailure bool
    Whether to continue executing queries if a query fails. Setting to true can be useful when executing independent parallel queries. Defaults to false.
    JarFileUris List<string>
    HCFS URIs of jar files to add to the CLASSPATH of the Pig Client and Hadoop MapReduce (MR) tasks. Can contain Pig UDFs.

    • logging_config.driver_log_levels- (Required) The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
    LoggingConfig JobPigConfigLoggingConfig
    The runtime logging config of the job
    Properties Dictionary<string, string>
    A mapping of property names to values, used to configure Pig. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, /etc/pig/conf/pig.properties, and classes in user code.
    QueryFileUri string
    HCFS URI of file containing the Pig script to execute as the job. Conflicts with query_list
    QueryLists List<string>
    The list of Pig queries or statements to execute as part of the job. Conflicts with query_file_uri
    ScriptVariables Dictionary<string, string>
    Mapping of query variable names to values (equivalent to the Pig command: name=[value]).
    ContinueOnFailure bool
    Whether to continue executing queries if a query fails. Setting to true can be useful when executing independent parallel queries. Defaults to false.
    JarFileUris []string
    HCFS URIs of jar files to add to the CLASSPATH of the Pig Client and Hadoop MapReduce (MR) tasks. Can contain Pig UDFs.

    • logging_config.driver_log_levels- (Required) The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
    LoggingConfig JobPigConfigLoggingConfig
    The runtime logging config of the job
    Properties map[string]string
    A mapping of property names to values, used to configure Pig. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, /etc/pig/conf/pig.properties, and classes in user code.
    QueryFileUri string
    HCFS URI of file containing the Pig script to execute as the job. Conflicts with query_list
    QueryLists []string
    The list of Pig queries or statements to execute as part of the job. Conflicts with query_file_uri
    ScriptVariables map[string]string
    Mapping of query variable names to values (equivalent to the Pig command: name=[value]).
    continueOnFailure Boolean
    Whether to continue executing queries if a query fails. Setting to true can be useful when executing independent parallel queries. Defaults to false.
    jarFileUris List<String>
    HCFS URIs of jar files to add to the CLASSPATH of the Pig Client and Hadoop MapReduce (MR) tasks. Can contain Pig UDFs.

    • logging_config.driver_log_levels- (Required) The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
    loggingConfig JobPigConfigLoggingConfig
    The runtime logging config of the job
    properties Map<String,String>
    A mapping of property names to values, used to configure Pig. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, /etc/pig/conf/pig.properties, and classes in user code.
    queryFileUri String
    HCFS URI of file containing the Pig script to execute as the job. Conflicts with query_list
    queryLists List<String>
    The list of Pig queries or statements to execute as part of the job. Conflicts with query_file_uri
    scriptVariables Map<String,String>
    Mapping of query variable names to values (equivalent to the Pig command: name=[value]).
    continueOnFailure boolean
    Whether to continue executing queries if a query fails. Setting to true can be useful when executing independent parallel queries. Defaults to false.
    jarFileUris string[]
    HCFS URIs of jar files to add to the CLASSPATH of the Pig Client and Hadoop MapReduce (MR) tasks. Can contain Pig UDFs.

    • logging_config.driver_log_levels- (Required) The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
    loggingConfig JobPigConfigLoggingConfig
    The runtime logging config of the job
    properties {[key: string]: string}
    A mapping of property names to values, used to configure Pig. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, /etc/pig/conf/pig.properties, and classes in user code.
    queryFileUri string
    HCFS URI of file containing the Pig script to execute as the job. Conflicts with query_list
    queryLists string[]
    The list of Pig queries or statements to execute as part of the job. Conflicts with query_file_uri
    scriptVariables {[key: string]: string}
    Mapping of query variable names to values (equivalent to the Pig command: name=[value]).
    continue_on_failure bool
    Whether to continue executing queries if a query fails. Setting to true can be useful when executing independent parallel queries. Defaults to false.
    jar_file_uris Sequence[str]
    HCFS URIs of jar files to add to the CLASSPATH of the Pig Client and Hadoop MapReduce (MR) tasks. Can contain Pig UDFs.

    • logging_config.driver_log_levels- (Required) The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
    logging_config JobPigConfigLoggingConfig
    The runtime logging config of the job
    properties Mapping[str, str]
    A mapping of property names to values, used to configure Pig. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, /etc/pig/conf/pig.properties, and classes in user code.
    query_file_uri str
    HCFS URI of file containing the Pig script to execute as the job. Conflicts with query_list
    query_lists Sequence[str]
    The list of Pig queries or statements to execute as part of the job. Conflicts with query_file_uri
    script_variables Mapping[str, str]
    Mapping of query variable names to values (equivalent to the Pig command: name=[value]).
    continueOnFailure Boolean
    Whether to continue executing queries if a query fails. Setting to true can be useful when executing independent parallel queries. Defaults to false.
    jarFileUris List<String>
    HCFS URIs of jar files to add to the CLASSPATH of the Pig Client and Hadoop MapReduce (MR) tasks. Can contain Pig UDFs.

    • logging_config.driver_log_levels- (Required) The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
    loggingConfig Property Map
    The runtime logging config of the job
    properties Map<String>
    A mapping of property names to values, used to configure Pig. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, /etc/pig/conf/pig.properties, and classes in user code.
    queryFileUri String
    HCFS URI of file containing the Pig script to execute as the job. Conflicts with query_list
    queryLists List<String>
    The list of Pig queries or statements to execute as part of the job. Conflicts with query_file_uri
    scriptVariables Map<String>
    Mapping of query variable names to values (equivalent to the Pig command: name=[value]).
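
    A minimal TypeScript sketch of a Pig job driven entirely by query_lists (a small word-count pipeline over a license file that ships on the cluster image); the cluster name is an assumed placeholder:

    import * as gcp from "@pulumi/gcp";

    const pig = new gcp.dataproc.Job("pig", {
        region: "us-central1",
        placement: { clusterName: "my-existing-cluster" }, // assumed placeholder
        pigConfig: {
            queryLists: [
                "LNS = LOAD 'file:///usr/lib/pig/LICENSE.txt' as (line)",
                "WORDS = FOREACH LNS GENERATE FLATTEN(TOKENIZE(line)) AS word",
                "GROUPS = GROUP WORDS BY word",
                "WORD_COUNTS = FOREACH GROUPS GENERATE group, COUNT(WORDS)",
                "DUMP WORD_COUNTS",
            ],
            loggingConfig: {
                driverLogLevels: { root: "INFO" },
            },
        },
    });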

    JobPigConfigLoggingConfig, JobPigConfigLoggingConfigArgs

    DriverLogLevels Dictionary<string, string>
    Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
    DriverLogLevels map[string]string
    Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
    driverLogLevels Map<String,String>
    Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
    driverLogLevels {[key: string]: string}
    Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
    driver_log_levels Mapping[str, str]
    Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
    driverLogLevels Map<String>
    Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.

    JobPlacement, JobPlacementArgs

    ClusterName string
    The name of the cluster where the job will be submitted
    ClusterUuid string
    Output-only. A cluster UUID generated by the Cloud Dataproc service when the job is submitted
    ClusterName string
    The name of the cluster where the job will be submitted
    ClusterUuid string
    Output-only. A cluster UUID generated by the Cloud Dataproc service when the job is submitted
    clusterName String
    The name of the cluster where the job will be submitted
    clusterUuid String
    Output-only. A cluster UUID generated by the Cloud Dataproc service when the job is submitted
    clusterName string
    The name of the cluster where the job will be submitted
    clusterUuid string
    Output-only. A cluster UUID generated by the Cloud Dataproc service when the job is submitted
    cluster_name str
    The name of the cluster where the job will be submitted
    cluster_uuid str
    Output-only. A cluster UUID generated by the Cloud Dataproc service when the job is submitted
    clusterName String
    The name of the cluster where the job will be submitted
    clusterUuid String
    Output-only. A cluster UUID generated by the Cloud Dataproc service when the job is submitted

    JobPrestoConfig, JobPrestoConfigArgs

    ClientTags List<string>
    Presto client tags to attach to this query.
    ContinueOnFailure bool
    Whether to continue executing queries if a query fails. Setting to true can be useful when executing independent parallel queries. Defaults to false.
    LoggingConfig JobPrestoConfigLoggingConfig
    The runtime logging config of the job
    OutputFormat string
    The format in which query output will be displayed. See the Presto documentation for supported output formats.

    • logging_config.driver_log_levels- (Required) The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
    Properties Dictionary<string, string>
    A mapping of property names to values. Used to set Presto session properties. Equivalent to using the --session flag in the Presto CLI.
    QueryFileUri string
    The HCFS URI of the script that contains SQL queries. Conflicts with query_list
    QueryLists List<string>
    The list of SQL queries or statements to execute as part of the job. Conflicts with query_file_uri
    ClientTags []string
    Presto client tags to attach to this query.
    ContinueOnFailure bool
    Whether to continue executing queries if a query fails. Setting to true can be useful when executing independent parallel queries. Defaults to false.
    LoggingConfig JobPrestoConfigLoggingConfig
    The runtime logging config of the job
    OutputFormat string
    The format in which query output will be displayed. See the Presto documentation for supported output formats.

    • logging_config.driver_log_levels- (Required) The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
    Properties map[string]string
    A mapping of property names to values. Used to set Presto session properties. Equivalent to using the --session flag in the Presto CLI.
    QueryFileUri string
    The HCFS URI of the script that contains SQL queries. Conflicts with query_list
    QueryLists []string
    The list of SQL queries or statements to execute as part of the job. Conflicts with query_file_uri
    clientTags List<String>
    Presto client tags to attach to this query.
    continueOnFailure Boolean
    Whether to continue executing queries if a query fails. Setting to true can be useful when executing independent parallel queries. Defaults to false.
    loggingConfig JobPrestoConfigLoggingConfig
    The runtime logging config of the job
    outputFormat String
    The format in which query output will be displayed. See the Presto documentation for supported output formats.

    • logging_config.driver_log_levels- (Required) The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
    properties Map<String,String>
    A mapping of property names to values. Used to set Presto session properties. Equivalent to using the --session flag in the Presto CLI.
    queryFileUri String
    The HCFS URI of the script that contains SQL queries. Conflicts with query_list
    queryLists List<String>
    The list of SQL queries or statements to execute as part of the job. Conflicts with query_file_uri
    clientTags string[]
    Presto client tags to attach to this query.
    continueOnFailure boolean
    Whether to continue executing queries if a query fails. Setting to true can be useful when executing independent parallel queries. Defaults to false.
    loggingConfig JobPrestoConfigLoggingConfig
    The runtime logging config of the job
    outputFormat string
    The format in which query output will be displayed. See the Presto documentation for supported output formats.

    • logging_config.driver_log_levels- (Required) The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
    properties {[key: string]: string}
    A mapping of property names to values. Used to set Presto session properties. Equivalent to using the --session flag in the Presto CLI.
    queryFileUri string
    The HCFS URI of the script that contains SQL queries. Conflicts with query_list
    queryLists string[]
    The list of SQL queries or statements to execute as part of the job. Conflicts with query_file_uri
    client_tags Sequence[str]
    Presto client tags to attach to this query.
    continue_on_failure bool
    Whether to continue executing queries if a query fails. Setting to true can be useful when executing independent parallel queries. Defaults to false.
    logging_config JobPrestoConfigLoggingConfig
    The runtime logging config of the job
    output_format str
    The format in which query output will be displayed. See the Presto documentation for supported output formats.

    • logging_config.driver_log_levels- (Required) The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
    properties Mapping[str, str]
    A mapping of property names to values. Used to set Presto session properties. Equivalent to using the --session flag in the Presto CLI.
    query_file_uri str
    The HCFS URI of the script that contains SQL queries. Conflicts with query_list
    query_lists Sequence[str]
    The list of SQL queries or statements to execute as part of the job. Conflicts with query_file_uri
    clientTags List<String>
    Presto client tags to attach to this query.
    continueOnFailure Boolean
    Whether to continue executing queries if a query fails. Setting to true can be useful when executing independent parallel queries. Defaults to false.
    loggingConfig Property Map
    The runtime logging config of the job
    outputFormat String
    The format in which query output will be displayed. See the Presto documentation for supported output formats.

    • logging_config.driver_log_levels- (Required) The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
    properties Map<String>
    A mapping of property names to values. Used to set Presto session properties. Equivalent to using the --session flag in the Presto CLI.
    queryFileUri String
    The HCFS URI of the script that contains SQL queries. Conflicts with query_list
    queryLists List<String>
    The list of SQL queries or statements to execute as part of the job. Conflicts with query_file_uri
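
    A hedged TypeScript sketch of a Presto job. It assumes the target cluster was created with the Presto optional component enabled; the cluster name is a placeholder:

    import * as gcp from "@pulumi/gcp";

    const presto = new gcp.dataproc.Job("presto", {
        region: "us-central1",
        placement: { clusterName: "my-presto-cluster" }, // assumed placeholder
        prestoConfig: {
            queryLists: ["SELECT * FROM system.metadata.table_properties"],
            clientTags: ["pulumi-example"],
            outputFormat: "CSV",
            loggingConfig: {
                driverLogLevels: { root: "INFO" },
            },
        },
    });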

    JobPrestoConfigLoggingConfig, JobPrestoConfigLoggingConfigArgs

    DriverLogLevels Dictionary<string, string>
    Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
    DriverLogLevels map[string]string
    Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
    driverLogLevels Map<String,String>
    Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
    driverLogLevels {[key: string]: string}
    Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
    driver_log_levels Mapping[str, str]
    Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
    driverLogLevels Map<String>
    Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.

    JobPysparkConfig, JobPysparkConfigArgs

    MainPythonFileUri string
    The HCFS URI of the main Python file to use as the driver. Must be a .py file.
    ArchiveUris List<string>
    HCFS URIs of archives to be extracted into the working directory. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
    Args List<string>
    The arguments to pass to the driver.
    FileUris List<string>
    HCFS URIs of files to be copied to the working directory of Python drivers and distributed tasks. Useful for naively parallel tasks.
    JarFileUris List<string>
    HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks.
    LoggingConfig JobPysparkConfigLoggingConfig
    The runtime logging config of the job
    Properties Dictionary<string, string>
    A mapping of property names to values, used to configure PySpark. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.

    • logging_config.driver_log_levels - (Required) The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
    PythonFileUris List<string>
    HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip.
    MainPythonFileUri string
    The HCFS URI of the main Python file to use as the driver. Must be a .py file.
    ArchiveUris []string
    HCFS URIs of archives to be extracted into the working directory. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
    Args []string
    The arguments to pass to the driver.
    FileUris []string
    HCFS URIs of files to be copied to the working directory of Python drivers and distributed tasks. Useful for naively parallel tasks.
    JarFileUris []string
    HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks.
    LoggingConfig JobPysparkConfigLoggingConfig
    The runtime logging config of the job
    Properties map[string]string
    A mapping of property names to values, used to configure PySpark. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.

    • logging_config.driver_log_levels - (Required) The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
    PythonFileUris []string
    HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip.
    mainPythonFileUri String
    The HCFS URI of the main Python file to use as the driver. Must be a .py file.
    archiveUris List<String>
    HCFS URIs of archives to be extracted into the working directory. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
    args List<String>
    The arguments to pass to the driver.
    fileUris List<String>
    HCFS URIs of files to be copied to the working directory of Python drivers and distributed tasks. Useful for naively parallel tasks.
    jarFileUris List<String>
    HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks.
    loggingConfig JobPysparkConfigLoggingConfig
    The runtime logging config of the job
    properties Map<String,String>
    A mapping of property names to values, used to configure PySpark. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.

    • logging_config.driver_log_levels - (Required) The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
    pythonFileUris List<String>
    HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip.
    mainPythonFileUri string
    The HCFS URI of the main Python file to use as the driver. Must be a .py file.
    archiveUris string[]
    HCFS URIs of archives to be extracted into the working directory. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
    args string[]
    The arguments to pass to the driver.
    fileUris string[]
    HCFS URIs of files to be copied to the working directory of Python drivers and distributed tasks. Useful for naively parallel tasks.
    jarFileUris string[]
    HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks.
    loggingConfig JobPysparkConfigLoggingConfig
    The runtime logging config of the job
    properties {[key: string]: string}
    A mapping of property names to values, used to configure PySpark. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.

    • logging_config.driver_log_levels - (Required) The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
    pythonFileUris string[]
    HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip.
    main_python_file_uri str
    The HCFS URI of the main Python file to use as the driver. Must be a .py file.
    archive_uris Sequence[str]
    HCFS URIs of archives to be extracted into the working directory. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
    args Sequence[str]
    The arguments to pass to the driver.
    file_uris Sequence[str]
    HCFS URIs of files to be copied to the working directory of Python drivers and distributed tasks. Useful for naively parallel tasks.
    jar_file_uris Sequence[str]
    HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks.
    logging_config JobPysparkConfigLoggingConfig
    The runtime logging config of the job
    properties Mapping[str, str]
    A mapping of property names to values, used to configure PySpark. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.

    • logging_config.driver_log_levels - (Required) The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
    python_file_uris Sequence[str]
    HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip.
    mainPythonFileUri String
    The HCFS URI of the main Python file to use as the driver. Must be a .py file.
    archiveUris List<String>
    HCFS URIs of archives to be extracted into the working directory. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
    args List<String>
    The arguments to pass to the driver.
    fileUris List<String>
    HCFS URIs of files to be copied to the working directory of Python drivers and distributed tasks. Useful for naively parallel tasks.
    jarFileUris List<String>
    HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks.
    loggingConfig Property Map
    The runtime logging config of the job
    properties Map<String>
    A mapping of property names to values, used to configure PySpark. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.

    • logging_config.driver_log_levels - (Required) The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
    pythonFileUris List<String>
    HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip.
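
    The optional PySpark fields above compose as in the following hedged TypeScript sketch, where every gs:// path and the cluster name are placeholders.

    import * as gcp from "@pulumi/gcp";

    // Hypothetical PySpark job exercising args, dependency URIs, and driver log levels.
    const pysparkJob = new gcp.dataproc.Job("pyspark-example", {
        region: "us-central1",
        placement: {
            clusterName: "existing-cluster-name",
        },
        pysparkConfig: {
            mainPythonFileUri: "gs://my-bucket/jobs/main.py",
            args: ["--date", "2024-01-01"],
            pythonFileUris: ["gs://my-bucket/jobs/helpers.zip"],   // .py, .egg, or .zip
            archiveUris: ["gs://my-bucket/jobs/resources.tar.gz"], // extracted into the working directory
            properties: {
                "spark.executor.memory": "4g",
            },
            loggingConfig: {
                driverLogLevels: {
                    root: "INFO",
                },
            },
        },
    });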

    JobPysparkConfigLoggingConfig, JobPysparkConfigLoggingConfigArgs

    DriverLogLevels Dictionary<string, string>
    Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
    DriverLogLevels map[string]string
    Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
    driverLogLevels Map<String,String>
    Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
    driverLogLevels {[key: string]: string}
    Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
    driver_log_levels Mapping[str, str]
    Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
    driverLogLevels Map<String>
    Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.

    JobReference, JobReferenceArgs

    JobId string
    The job ID, which must be unique within the project. The job ID is generated by the server upon job submission or provided by the user as a means to perform retries without creating duplicate jobs
    JobId string
    The job ID, which must be unique within the project. The job ID is generated by the server upon job submission or provided by the user as a means to perform retries without creating duplicate jobs
    jobId String
    The job ID, which must be unique within the project. The job ID is generated by the server upon job submission or provided by the user as a means to perform retries without creating duplicate jobs
    jobId string
    The job ID, which must be unique within the project. The job ID is generated by the server upon job submission or provided by the user as a means to perform retries without creating duplicate jobs
    job_id str
    The job ID, which must be unique within the project. The job ID is generated by the server upon job submission or provided by the user as a means to perform retries without creating duplicate jobs
    jobId String
    The job ID, which must be unique within the project. The job ID is generated by the server upon job submission or provided by the user as a means to perform retries without creating duplicate jobs
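
    Supplying your own job ID through the reference block lets you retry a submission without creating a duplicate job; otherwise the server generates the ID. A sketch with a made-up ID and placeholder cluster name:

    import * as gcp from "@pulumi/gcp";

    // The jobId below is illustrative; it must be unique within the project.
    const referencedJob = new gcp.dataproc.Job("with-reference", {
        region: "us-central1",
        placement: { clusterName: "existing-cluster-name" },
        reference: {
            jobId: "nightly-aggregation-2024-04-18",
        },
        sparkConfig: {
            mainClass: "org.apache.spark.examples.SparkPi",
            jarFileUris: ["file:///usr/lib/spark/examples/jars/spark-examples.jar"],
        },
    });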

    JobScheduling, JobSchedulingArgs

    MaxFailuresPerHour int
    Maximum number of times per hour a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed.
    MaxFailuresTotal int
    Maximum number of times in total a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed.
    MaxFailuresPerHour int
    Maximum number of times per hour a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed.
    MaxFailuresTotal int
    Maximum number of times in total a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed.
    maxFailuresPerHour Integer
    Maximum number of times per hour a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed.
    maxFailuresTotal Integer
    Maximum number of times in total a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed.
    maxFailuresPerHour number
    Maximum number of times per hour a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed.
    maxFailuresTotal number
    Maximum number of times in total a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed.
    max_failures_per_hour int
    Maximum number of times per hour a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed.
    max_failures_total int
    Maximum number of times in total a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed.
    maxFailuresPerHour Number
    Maximum number of times per hour a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed.
    maxFailuresTotal Number
    Maximum number of times in total a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed.
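
    Restart-on-failure is opt-in via the scheduling block; the thresholds below are arbitrary and the rest of the job is a placeholder.

    import * as gcp from "@pulumi/gcp";

    // Allow the driver a limited number of restarts before the job is reported failed.
    const restartableJob = new gcp.dataproc.Job("restartable", {
        region: "us-central1",
        placement: { clusterName: "existing-cluster-name" },
        scheduling: {
            maxFailuresPerHour: 1, // hourly restart budget
            maxFailuresTotal: 5,   // lifetime restart budget
        },
        pysparkConfig: {
            mainPythonFileUri: "gs://my-bucket/jobs/main.py",
        },
    });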

    JobSparkConfig, JobSparkConfigArgs

    ArchiveUris List<string>
    HCFS URIs of archives to be extracted into the working directory. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
    Args List<string>
    The arguments to pass to the driver.
    FileUris List<string>
    HCFS URIs of files to be copied to the working directory of Spark drivers and distributed tasks. Useful for naively parallel tasks.
    JarFileUris List<string>
    HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks.
    LoggingConfig JobSparkConfigLoggingConfig
    The runtime logging config of the job
    MainClass string
    The class containing the main method of the driver. Must be in a provided jar or jar that is already on the classpath. Conflicts with main_jar_file_uri
    MainJarFileUri string
    The HCFS URI of the jar file that contains the main class. Conflicts with main_class
    Properties Dictionary<string, string>
    A mapping of property names to values, used to configure Spark. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.

    • logging_config.driver_log_levels - (Required) The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
    ArchiveUris []string
    HCFS URIs of archives to be extracted into the working directory. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
    Args []string
    The arguments to pass to the driver.
    FileUris []string
    HCFS URIs of files to be copied to the working directory of Spark drivers and distributed tasks. Useful for naively parallel tasks.
    JarFileUris []string
    HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks.
    LoggingConfig JobSparkConfigLoggingConfig
    The runtime logging config of the job
    MainClass string
    The class containing the main method of the driver. Must be in a provided jar or jar that is already on the classpath. Conflicts with main_jar_file_uri
    MainJarFileUri string
    The HCFS URI of the jar file that contains the main class. Conflicts with main_class
    Properties map[string]string
    A mapping of property names to values, used to configure Spark. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.

    • logging_config.driver_log_levels - (Required) The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
    archiveUris List<String>
    HCFS URIs of archives to be extracted into the working directory. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
    args List<String>
    The arguments to pass to the driver.
    fileUris List<String>
    HCFS URIs of files to be copied to the working directory of Spark drivers and distributed tasks. Useful for naively parallel tasks.
    jarFileUris List<String>
    HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks.
    loggingConfig JobSparkConfigLoggingConfig
    The runtime logging config of the job
    mainClass String
    The class containing the main method of the driver. Must be in a provided jar or jar that is already on the classpath. Conflicts with main_jar_file_uri
    mainJarFileUri String
    The HCFS URI of the jar file that contains the main class. Conflicts with main_class
    properties Map<String,String>
    A mapping of property names to values, used to configure Spark. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.

    • logging_config.driver_log_levels - (Required) The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
    archiveUris string[]
    HCFS URIs of archives to be extracted into the working directory. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
    args string[]
    The arguments to pass to the driver.
    fileUris string[]
    HCFS URIs of files to be copied to the working directory of Spark drivers and distributed tasks. Useful for naively parallel tasks.
    jarFileUris string[]
    HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks.
    loggingConfig JobSparkConfigLoggingConfig
    The runtime logging config of the job
    mainClass string
    The class containing the main method of the driver. Must be in a provided jar or jar that is already on the classpath. Conflicts with main_jar_file_uri
    mainJarFileUri string
    The HCFS URI of the jar file that contains the main class. Conflicts with main_class
    properties {[key: string]: string}
    A mapping of property names to values, used to configure Spark. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.

    • logging_config.driver_log_levels - (Required) The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
    archive_uris Sequence[str]
    HCFS URIs of archives to be extracted into the working directory. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
    args Sequence[str]
    The arguments to pass to the driver.
    file_uris Sequence[str]
    HCFS URIs of files to be copied to the working directory of Spark drivers and distributed tasks. Useful for naively parallel tasks.
    jar_file_uris Sequence[str]
    HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks.
    logging_config JobSparkConfigLoggingConfig
    The runtime logging config of the job
    main_class str
    The class containing the main method of the driver. Must be in a provided jar or jar that is already on the classpath. Conflicts with main_jar_file_uri
    main_jar_file_uri str
    The HCFS URI of the jar file that contains the main class. Conflicts with main_class
    properties Mapping[str, str]
    A mapping of property names to values, used to configure Spark. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.

    • logging_config.driver_log_levels - (Required) The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
    archiveUris List<String>
    HCFS URIs of archives to be extracted into the working directory. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
    args List<String>
    The arguments to pass to the driver.
    fileUris List<String>
    HCFS URIs of files to be copied to the working directory of Spark drivers and distributed tasks. Useful for naively parallel tasks.
    jarFileUris List<String>
    HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks.
    loggingConfig Property Map
    The runtime logging config of the job
    mainClass String
    The class containing the main method of the driver. Must be in a provided jar or jar that is already on the classpath. Conflicts with main_jar_file_uri
    mainJarFileUri String
    The HCFS URI of the jar file that contains the main class. Conflicts with main_class
    properties Map<String>
    A mapping of property names to values, used to configure Spark. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.

    • logging_config.driver_log_levels - (Required) The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
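
    Because main_class and main_jar_file_uri conflict, a job specifies exactly one of them; the sketch below uses a jar whose manifest names the entry point, with a placeholder gs:// path and cluster name.

    import * as gcp from "@pulumi/gcp";

    // Spark job driven by the jar's Main-Class manifest entry rather than an explicit mainClass.
    const sparkJarJob = new gcp.dataproc.Job("spark-jar-example", {
        region: "us-central1",
        placement: { clusterName: "existing-cluster-name" },
        sparkConfig: {
            mainJarFileUri: "gs://my-bucket/jobs/my-spark-app.jar", // conflicts with mainClass
            args: ["--mode", "batch"],
            properties: {
                "spark.driver.memory": "2g",
            },
            loggingConfig: {
                driverLogLevels: { root: "INFO" },
            },
        },
    });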

    JobSparkConfigLoggingConfig, JobSparkConfigLoggingConfigArgs

    DriverLogLevels Dictionary<string, string>
    Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
    DriverLogLevels map[string]string
    Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
    driverLogLevels Map<String,String>
    Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
    driverLogLevels {[key: string]: string}
    Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
    driver_log_levels Mapping[str, str]
    Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
    driverLogLevels Map<String>
    Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.

    JobSparksqlConfig, JobSparksqlConfigArgs

    JarFileUris List<string>
    HCFS URIs of jar files to be added to the Spark CLASSPATH.

    • logging_config.driver_log_levels - (Required) The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
    LoggingConfig JobSparksqlConfigLoggingConfig
    The runtime logging config of the job
    Properties Dictionary<string, string>
    A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the Cloud Dataproc API may be overwritten.
    QueryFileUri string
    The HCFS URI of the script that contains SQL queries. Conflicts with query_lists
    QueryLists List<string>
    The list of SQL queries or statements to execute as part of the job. Conflicts with query_file_uri
    ScriptVariables Dictionary<string, string>
    Mapping of query variable names to values (equivalent to the Spark SQL command: SET name="value";).
    JarFileUris []string
    HCFS URIs of jar files to be added to the Spark CLASSPATH.

    • logging_config.driver_log_levels - (Required) The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
    LoggingConfig JobSparksqlConfigLoggingConfig
    The runtime logging config of the job
    Properties map[string]string
    A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the Cloud Dataproc API may be overwritten.
    QueryFileUri string
    The HCFS URI of the script that contains SQL queries. Conflicts with query_lists
    QueryLists []string
    The list of SQL queries or statements to execute as part of the job. Conflicts with query_file_uri
    ScriptVariables map[string]string
    Mapping of query variable names to values (equivalent to the Spark SQL command: SET name="value";).
    jarFileUris List<String>
    HCFS URIs of jar files to be added to the Spark CLASSPATH.

    • logging_config.driver_log_levels - (Required) The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
    loggingConfig JobSparksqlConfigLoggingConfig
    The runtime logging config of the job
    properties Map<String,String>
    A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the Cloud Dataproc API may be overwritten.
    queryFileUri String
    The HCFS URI of the script that contains SQL queries. Conflicts with query_lists
    queryLists List<String>
    The list of SQL queries or statements to execute as part of the job. Conflicts with query_file_uri
    scriptVariables Map<String,String>
    Mapping of query variable names to values (equivalent to the Spark SQL command: SET name="value";).
    jarFileUris string[]
    HCFS URIs of jar files to be added to the Spark CLASSPATH.

    • logging_config.driver_log_levels - (Required) The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
    loggingConfig JobSparksqlConfigLoggingConfig
    The runtime logging config of the job
    properties {[key: string]: string}
    A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the Cloud Dataproc API may be overwritten.
    queryFileUri string
    The HCFS URI of the script that contains SQL queries. Conflicts with query_lists
    queryLists string[]
    The list of SQL queries or statements to execute as part of the job. Conflicts with query_file_uri
    scriptVariables {[key: string]: string}
    Mapping of query variable names to values (equivalent to the Spark SQL command: SET name="value";).
    jar_file_uris Sequence[str]
    HCFS URIs of jar files to be added to the Spark CLASSPATH.

    • logging_config.driver_log_levels - (Required) The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
    logging_config JobSparksqlConfigLoggingConfig
    The runtime logging config of the job
    properties Mapping[str, str]
    A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the Cloud Dataproc API may be overwritten.
    query_file_uri str
    The HCFS URI of the script that contains SQL queries. Conflicts with query_lists
    query_lists Sequence[str]
    The list of SQL queries or statements to execute as part of the job. Conflicts with query_file_uri
    script_variables Mapping[str, str]
    Mapping of query variable names to values (equivalent to the Spark SQL command: SET name="value";).
    jarFileUris List<String>
    HCFS URIs of jar files to be added to the Spark CLASSPATH.

    • logging_config.driver_log_levels - (Required) The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
    loggingConfig Property Map
    The runtime logging config of the job
    properties Map<String>
    A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the Cloud Dataproc API may be overwritten.
    queryFileUri String
    The HCFS URI of the script that contains SQL queries. Conflicts with query_lists
    queryLists List<String>
    The list of SQL queries or statements to execute as part of the job. Conflicts with query_file_uri
    scriptVariables Map<String>
    Mapping of query variable names to values (equivalent to the Spark SQL command: SET name="value";).
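
    A Spark SQL job can run inline statements and parameterize them through scriptVariables; in the sketch below the statements, variable, and property are illustrative only.

    import * as gcp from "@pulumi/gcp";

    // Inline Spark SQL with a substituted ${env} variable (equivalent to SET env="dev";).
    const sparkSqlJob = new gcp.dataproc.Job("sparksql-example", {
        region: "us-central1",
        placement: { clusterName: "existing-cluster-name" },
        sparksqlConfig: {
            queryLists: [
                "CREATE TABLE IF NOT EXISTS ${env}_events (id BIGINT, payload STRING)",
                "SHOW TABLES",
            ],
            scriptVariables: {
                env: "dev",
            },
            properties: {
                "spark.sql.shuffle.partitions": "50",
            },
        },
    });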

    JobSparksqlConfigLoggingConfig, JobSparksqlConfigLoggingConfigArgs

    DriverLogLevels Dictionary<string, string>
    Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
    DriverLogLevels map[string]string
    Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
    driverLogLevels Map<String,String>
    Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
    driverLogLevels {[key: string]: string}
    Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
    driver_log_levels Mapping[str, str]
    Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
    driverLogLevels Map<String>
    Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.

    JobStatus, JobStatusArgs

    Details string
    Optional job state details, such as an error description if the state is ERROR.
    State string
    A state message specifying the overall job state.
    StateStartTime string
    The time when this state was entered.
    Substate string
    Additional state information, which includes status reported by the agent.
    Details string
    Optional job state details, such as an error description if the state is ERROR.
    State string
    A state message specifying the overall job state.
    StateStartTime string
    The time when this state was entered.
    Substate string
    Additional state information, which includes status reported by the agent.
    details String
    Optional job state details, such as an error description if the state is ERROR.
    state String
    A state message specifying the overall job state.
    stateStartTime String
    The time when this state was entered.
    substate String
    Additional state information, which includes status reported by the agent.
    details string
    Optional job state details, such as an error description if the state is ERROR.
    state string
    A state message specifying the overall job state.
    stateStartTime string
    The time when this state was entered.
    substate string
    Additional state information, which includes status reported by the agent.
    details str
    Optional job state details, such as an error description if the state is ERROR.
    state str
    A state message specifying the overall job state.
    state_start_time str
    The time when this state was entered.
    substate str
    Additional state information, which includes status reported by the agent.
    details String
    Optional job state details, such as an error description if the state is ERROR.
    state String
    A state message specifying the overall job state.
    stateStartTime String
    The time when this state was entered.
    substate String
    Additional state information, which includes status reported by the agent.
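
    These fields are exposed through the resource's statuses output, so they can be exported like any other attribute. A sketch, assuming a job resource named sparkSqlJob as in the hypothetical snippets above:

    // statuses[0] holds the job's current state information.
    export const jobStateDetails = sparkSqlJob.statuses.apply(s => s[0].details);
    export const jobSubstate = sparkSqlJob.statuses.apply(s => s[0].substate);
    export const jobStateSince = sparkSqlJob.statuses.apply(s => s[0].stateStartTime);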

    Import

    This resource does not support import.

    To learn more about importing existing cloud resources, see Importing resources.

    Package Details

    Repository
    Google Cloud (GCP) Classic pulumi/pulumi-gcp
    License
    Apache-2.0
    Notes
    This Pulumi package is based on the google-beta Terraform Provider.