databricks.Job

Databricks v1.49.0 published on Monday, Aug 26, 2024 by Pulumi

    The databricks.Job resource allows you to manage Databricks Jobs to run non-interactive code in a databricks_cluster.

    Example Usage

    Note In Pulumi configuration, it is recommended to define tasks in alphabetical order of their task_key arguments, so that you get a consistent and readable diff. Whenever tasks are added or removed, or a task_key is renamed, you will observe a change in the majority of tasks. This is because the current version of the provider treats task blocks as an ordered list. The task block could have been modeled as an unordered set instead, but then end-users would see the entire block replaced whenever a single property of a task changed.

    It is possible to create a Databricks job using task blocks. A single task is defined with a task block containing one of the *_task blocks, a task_key, and the additional arguments described below. In the examples that follow, references such as latest, smallest, shared, thisDatabricksNotebook, and thisDatabricksPipeline point to data sources and resources assumed to be defined elsewhere in the program.

    import * as pulumi from "@pulumi/pulumi";
    import * as databricks from "@pulumi/databricks";
    
    const _this = new databricks.Job("this", {
        name: "Job with multiple tasks",
        description: "This job executes multiple tasks on a shared job cluster, which will be provisioned as part of execution, and terminated once all tasks are finished.",
        jobClusters: [{
            jobClusterKey: "j",
            newCluster: {
                numWorkers: 2,
                sparkVersion: latest.id,
                nodeTypeId: smallest.id,
            },
        }],
        tasks: [
            {
                taskKey: "a",
                newCluster: {
                    numWorkers: 1,
                    sparkVersion: latest.id,
                    nodeTypeId: smallest.id,
                },
                notebookTask: {
                    notebookPath: thisDatabricksNotebook.path,
                },
            },
            {
                taskKey: "b",
                dependsOns: [{
                    taskKey: "a",
                }],
                existingClusterId: shared.id,
                sparkJarTask: {
                    mainClassName: "com.acme.data.Main",
                },
            },
            {
                taskKey: "c",
                jobClusterKey: "j",
                notebookTask: {
                    notebookPath: thisDatabricksNotebook.path,
                },
            },
            {
                taskKey: "d",
                pipelineTask: {
                    pipelineId: thisDatabricksPipeline.id,
                },
            },
        ],
    });
    
    import pulumi
    import pulumi_databricks as databricks
    
    this = databricks.Job("this",
        name="Job with multiple tasks",
        description="This job executes multiple tasks on a shared job cluster, which will be provisioned as part of execution, and terminated once all tasks are finished.",
        job_clusters=[{
            "job_cluster_key": "j",
            "new_cluster": {
                "num_workers": 2,
                "spark_version": latest["id"],
                "node_type_id": smallest["id"],
            },
        }],
        tasks=[
            {
                "task_key": "a",
                "new_cluster": {
                    "num_workers": 1,
                    "spark_version": latest["id"],
                    "node_type_id": smallest["id"],
                },
                "notebook_task": {
                    "notebook_path": this_databricks_notebook["path"],
                },
            },
            {
                "task_key": "b",
                "depends_ons": [{
                    "task_key": "a",
                }],
                "existing_cluster_id": shared["id"],
                "spark_jar_task": {
                    "main_class_name": "com.acme.data.Main",
                },
            },
            {
                "task_key": "c",
                "job_cluster_key": "j",
                "notebook_task": {
                    "notebook_path": this_databricks_notebook["path"],
                },
            },
            {
                "task_key": "d",
                "pipeline_task": {
                    "pipeline_id": this_databricks_pipeline["id"],
                },
            },
        ])
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		_, err := databricks.NewJob(ctx, "this", &databricks.JobArgs{
    			Name:        pulumi.String("Job with multiple tasks"),
    			Description: pulumi.String("This job executes multiple tasks on a shared job cluster, which will be provisioned as part of execution, and terminated once all tasks are finished."),
    			JobClusters: databricks.JobJobClusterArray{
    				&databricks.JobJobClusterArgs{
    					JobClusterKey: pulumi.String("j"),
    					NewCluster: &databricks.JobJobClusterNewClusterArgs{
    						NumWorkers:   pulumi.Int(2),
    						SparkVersion: pulumi.Any(latest.Id),
    						NodeTypeId:   pulumi.Any(smallest.Id),
    					},
    				},
    			},
    			Tasks: databricks.JobTaskArray{
    				&databricks.JobTaskArgs{
    					TaskKey: pulumi.String("a"),
    					NewCluster: &databricks.JobTaskNewClusterArgs{
    						NumWorkers:   pulumi.Int(1),
    						SparkVersion: pulumi.Any(latest.Id),
    						NodeTypeId:   pulumi.Any(smallest.Id),
    					},
    					NotebookTask: &databricks.JobTaskNotebookTaskArgs{
    						NotebookPath: pulumi.Any(thisDatabricksNotebook.Path),
    					},
    				},
    				&databricks.JobTaskArgs{
    					TaskKey: pulumi.String("b"),
    					DependsOns: databricks.JobTaskDependsOnArray{
    						&databricks.JobTaskDependsOnArgs{
    							TaskKey: pulumi.String("a"),
    						},
    					},
    					ExistingClusterId: pulumi.Any(shared.Id),
    					SparkJarTask: &databricks.JobTaskSparkJarTaskArgs{
    						MainClassName: pulumi.String("com.acme.data.Main"),
    					},
    				},
    				&databricks.JobTaskArgs{
    					TaskKey:       pulumi.String("c"),
    					JobClusterKey: pulumi.String("j"),
    					NotebookTask: &databricks.JobTaskNotebookTaskArgs{
    						NotebookPath: pulumi.Any(thisDatabricksNotebook.Path),
    					},
    				},
    				&databricks.JobTaskArgs{
    					TaskKey: pulumi.String("d"),
    					PipelineTask: &databricks.JobTaskPipelineTaskArgs{
    						PipelineId: pulumi.Any(thisDatabricksPipeline.Id),
    					},
    				},
    			},
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Databricks = Pulumi.Databricks;
    
    return await Deployment.RunAsync(() => 
    {
        var @this = new Databricks.Job("this", new()
        {
            Name = "Job with multiple tasks",
            Description = "This job executes multiple tasks on a shared job cluster, which will be provisioned as part of execution, and terminated once all tasks are finished.",
            JobClusters = new[]
            {
                new Databricks.Inputs.JobJobClusterArgs
                {
                    JobClusterKey = "j",
                    NewCluster = new Databricks.Inputs.JobJobClusterNewClusterArgs
                    {
                        NumWorkers = 2,
                        SparkVersion = latest.Id,
                        NodeTypeId = smallest.Id,
                    },
                },
            },
            Tasks = new[]
            {
                new Databricks.Inputs.JobTaskArgs
                {
                    TaskKey = "a",
                    NewCluster = new Databricks.Inputs.JobTaskNewClusterArgs
                    {
                        NumWorkers = 1,
                        SparkVersion = latest.Id,
                        NodeTypeId = smallest.Id,
                    },
                    NotebookTask = new Databricks.Inputs.JobTaskNotebookTaskArgs
                    {
                        NotebookPath = thisDatabricksNotebook.Path,
                    },
                },
                new Databricks.Inputs.JobTaskArgs
                {
                    TaskKey = "b",
                    DependsOns = new[]
                    {
                        new Databricks.Inputs.JobTaskDependsOnArgs
                        {
                            TaskKey = "a",
                        },
                    },
                    ExistingClusterId = shared.Id,
                    SparkJarTask = new Databricks.Inputs.JobTaskSparkJarTaskArgs
                    {
                        MainClassName = "com.acme.data.Main",
                    },
                },
                new Databricks.Inputs.JobTaskArgs
                {
                    TaskKey = "c",
                    JobClusterKey = "j",
                    NotebookTask = new Databricks.Inputs.JobTaskNotebookTaskArgs
                    {
                        NotebookPath = thisDatabricksNotebook.Path,
                    },
                },
                new Databricks.Inputs.JobTaskArgs
                {
                    TaskKey = "d",
                    PipelineTask = new Databricks.Inputs.JobTaskPipelineTaskArgs
                    {
                        PipelineId = thisDatabricksPipeline.Id,
                    },
                },
            },
        });
    
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.databricks.Job;
    import com.pulumi.databricks.JobArgs;
    import com.pulumi.databricks.inputs.JobJobClusterArgs;
    import com.pulumi.databricks.inputs.JobJobClusterNewClusterArgs;
    import com.pulumi.databricks.inputs.JobTaskArgs;
    import com.pulumi.databricks.inputs.JobTaskDependsOnArgs;
    import com.pulumi.databricks.inputs.JobTaskNewClusterArgs;
    import com.pulumi.databricks.inputs.JobTaskNotebookTaskArgs;
    import com.pulumi.databricks.inputs.JobTaskSparkJarTaskArgs;
    import com.pulumi.databricks.inputs.JobTaskPipelineTaskArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var this_ = new Job("this", JobArgs.builder()
                .name("Job with multiple tasks")
                .description("This job executes multiple tasks on a shared job cluster, which will be provisioned as part of execution, and terminated once all tasks are finished.")
                .jobClusters(JobJobClusterArgs.builder()
                    .jobClusterKey("j")
                    .newCluster(JobJobClusterNewClusterArgs.builder()
                        .numWorkers(2)
                        .sparkVersion(latest.id())
                        .nodeTypeId(smallest.id())
                        .build())
                    .build())
                .tasks(            
                    JobTaskArgs.builder()
                        .taskKey("a")
                        .newCluster(JobTaskNewClusterArgs.builder()
                            .numWorkers(1)
                            .sparkVersion(latest.id())
                            .nodeTypeId(smallest.id())
                            .build())
                        .notebookTask(JobTaskNotebookTaskArgs.builder()
                            .notebookPath(thisDatabricksNotebook.path())
                            .build())
                        .build(),
                    JobTaskArgs.builder()
                        .taskKey("b")
                        .dependsOns(JobTaskDependsOnArgs.builder()
                            .taskKey("a")
                            .build())
                        .existingClusterId(shared.id())
                        .sparkJarTask(JobTaskSparkJarTaskArgs.builder()
                            .mainClassName("com.acme.data.Main")
                            .build())
                        .build(),
                    JobTaskArgs.builder()
                        .taskKey("c")
                        .jobClusterKey("j")
                        .notebookTask(JobTaskNotebookTaskArgs.builder()
                            .notebookPath(thisDatabricksNotebook.path())
                            .build())
                        .build(),
                    JobTaskArgs.builder()
                        .taskKey("d")
                        .pipelineTask(JobTaskPipelineTaskArgs.builder()
                            .pipelineId(thisDatabricksPipeline.id())
                            .build())
                        .build())
                .build());
    
        }
    }
    
    resources:
      this:
        type: databricks:Job
        properties:
          name: Job with multiple tasks
          description: This job executes multiple tasks on a shared job cluster, which will be provisioned as part of execution, and terminated once all tasks are finished.
          jobClusters:
            - jobClusterKey: j
              newCluster:
                numWorkers: 2
                sparkVersion: ${latest.id}
                nodeTypeId: ${smallest.id}
          tasks:
            - taskKey: a
              newCluster:
                numWorkers: 1
                sparkVersion: ${latest.id}
                nodeTypeId: ${smallest.id}
              notebookTask:
                notebookPath: ${thisDatabricksNotebook.path}
            - taskKey: b
              dependsOns:
                - taskKey: a
              existingClusterId: ${shared.id}
              sparkJarTask:
                mainClassName: com.acme.data.Main
            - taskKey: c
              jobClusterKey: j
              notebookTask:
                notebookPath: ${thisDatabricksNotebook.path}
            - taskKey: d
              pipelineTask:
                pipelineId: ${thisDatabricksPipeline.id}
    

    Access Control

    By default, all users can create and modify jobs unless an administrator enables jobs access control. With jobs access control, individual permissions determine a user’s abilities.

    • databricks.Permissions can control which groups or individual users are granted the Can View, Can Manage Run, and Can Manage permission levels on a job (see the sketch after this list).
    • databricks.ClusterPolicy can control which kinds of clusters users can create for jobs.
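
    As a brief illustration (not part of the official example), the following TypeScript sketch grants a group read-only access to the job defined above. The group name devops is hypothetical, and permissionLevel accepts values such as CAN_VIEW, CAN_MANAGE_RUN, and CAN_MANAGE.

    import * as databricks from "@pulumi/databricks";

    // Grant the (hypothetical) "devops" group read-only access to the job.
    // `_this` is the databricks.Job resource created in the example above.
    const jobViewers = new databricks.Permissions("job-viewers", {
        jobId: _this.id,
        accessControls: [{
            groupName: "devops",         // hypothetical group name
            permissionLevel: "CAN_VIEW", // or CAN_MANAGE_RUN / CAN_MANAGE
        }],
    });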

    Create Job Resource

    Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.

    Constructor syntax

    new Job(name: string, args?: JobArgs, opts?: CustomResourceOptions);
    @overload
    def Job(resource_name: str,
            args: Optional[JobArgs] = None,
            opts: Optional[ResourceOptions] = None)
    
    @overload
    def Job(resource_name: str,
            opts: Optional[ResourceOptions] = None,
            always_running: Optional[bool] = None,
            continuous: Optional[JobContinuousArgs] = None,
            control_run_state: Optional[bool] = None,
            dbt_task: Optional[JobDbtTaskArgs] = None,
            deployment: Optional[JobDeploymentArgs] = None,
            description: Optional[str] = None,
            edit_mode: Optional[str] = None,
            email_notifications: Optional[JobEmailNotificationsArgs] = None,
            environments: Optional[Sequence[JobEnvironmentArgs]] = None,
            existing_cluster_id: Optional[str] = None,
            format: Optional[str] = None,
            git_source: Optional[JobGitSourceArgs] = None,
            health: Optional[JobHealthArgs] = None,
            job_clusters: Optional[Sequence[JobJobClusterArgs]] = None,
            libraries: Optional[Sequence[JobLibraryArgs]] = None,
            max_concurrent_runs: Optional[int] = None,
            max_retries: Optional[int] = None,
            min_retry_interval_millis: Optional[int] = None,
            name: Optional[str] = None,
            new_cluster: Optional[JobNewClusterArgs] = None,
            notebook_task: Optional[JobNotebookTaskArgs] = None,
            notification_settings: Optional[JobNotificationSettingsArgs] = None,
            parameters: Optional[Sequence[JobParameterArgs]] = None,
            pipeline_task: Optional[JobPipelineTaskArgs] = None,
            python_wheel_task: Optional[JobPythonWheelTaskArgs] = None,
            queue: Optional[JobQueueArgs] = None,
            retry_on_timeout: Optional[bool] = None,
            run_as: Optional[JobRunAsArgs] = None,
            run_job_task: Optional[JobRunJobTaskArgs] = None,
            schedule: Optional[JobScheduleArgs] = None,
            spark_jar_task: Optional[JobSparkJarTaskArgs] = None,
            spark_python_task: Optional[JobSparkPythonTaskArgs] = None,
            spark_submit_task: Optional[JobSparkSubmitTaskArgs] = None,
            tags: Optional[Mapping[str, str]] = None,
            tasks: Optional[Sequence[JobTaskArgs]] = None,
            timeout_seconds: Optional[int] = None,
            trigger: Optional[JobTriggerArgs] = None,
            webhook_notifications: Optional[JobWebhookNotificationsArgs] = None)
    func NewJob(ctx *Context, name string, args *JobArgs, opts ...ResourceOption) (*Job, error)
    public Job(string name, JobArgs? args = null, CustomResourceOptions? opts = null)
    public Job(String name, JobArgs args)
    public Job(String name, JobArgs args, CustomResourceOptions options)
    
    type: databricks:Job
    properties: # The arguments to resource properties.
    options: # Bag of options to control resource's behavior.
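
    For example, a minimal TypeScript sketch of the constructor: all JobArgs fields are optional, so an empty args object is valid, and the protect resource option is shown purely as an illustration.

    import * as databricks from "@pulumi/databricks";

    // Minimal constructor call: name, empty args, and resource options.
    // `protect: true` guards the job against accidental deletion (illustrative).
    const minimalJob = new databricks.Job("minimal", {}, { protect: true });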
    
    

    Parameters

    TypeScript (JavaScript)
    name string
    The unique name of the resource.
    args JobArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.

    Python
    resource_name str
    The unique name of the resource.
    args JobArgs
    The arguments to resource properties.
    opts ResourceOptions
    Bag of options to control resource's behavior.

    Go
    ctx Context
    Context object for the current deployment.
    name string
    The unique name of the resource.
    args JobArgs
    The arguments to resource properties.
    opts ResourceOption
    Bag of options to control resource's behavior.

    C#
    name string
    The unique name of the resource.
    args JobArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.

    Java
    name String
    The unique name of the resource.
    args JobArgs
    The arguments to resource properties.
    options CustomResourceOptions
    Bag of options to control resource's behavior.

    Constructor example

    The following reference example uses placeholder values for all input properties.

    var jobResource = new Databricks.Job("jobResource", new()
    {
        Continuous = new Databricks.Inputs.JobContinuousArgs
        {
            PauseStatus = "string",
        },
        ControlRunState = false,
        Deployment = new Databricks.Inputs.JobDeploymentArgs
        {
            Kind = "string",
            MetadataFilePath = "string",
        },
        Description = "string",
        EditMode = "string",
        EmailNotifications = new Databricks.Inputs.JobEmailNotificationsArgs
        {
            NoAlertForSkippedRuns = false,
            OnDurationWarningThresholdExceededs = new[]
            {
                "string",
            },
            OnFailures = new[]
            {
                "string",
            },
            OnStarts = new[]
            {
                "string",
            },
            OnStreamingBacklogExceededs = new[]
            {
                "string",
            },
            OnSuccesses = new[]
            {
                "string",
            },
        },
        Environments = new[]
        {
            new Databricks.Inputs.JobEnvironmentArgs
            {
                EnvironmentKey = "string",
                Spec = new Databricks.Inputs.JobEnvironmentSpecArgs
                {
                    Client = "string",
                    Dependencies = new[]
                    {
                        "string",
                    },
                },
            },
        },
        ExistingClusterId = "string",
        Format = "string",
        GitSource = new Databricks.Inputs.JobGitSourceArgs
        {
            Url = "string",
            Branch = "string",
            Commit = "string",
            GitSnapshot = new Databricks.Inputs.JobGitSourceGitSnapshotArgs
            {
                UsedCommit = "string",
            },
            JobSource = new Databricks.Inputs.JobGitSourceJobSourceArgs
            {
                ImportFromGitBranch = "string",
                JobConfigPath = "string",
                DirtyState = "string",
            },
            Provider = "string",
            Tag = "string",
        },
        Health = new Databricks.Inputs.JobHealthArgs
        {
            Rules = new[]
            {
                new Databricks.Inputs.JobHealthRuleArgs
                {
                    Metric = "string",
                    Op = "string",
                    Value = 0,
                },
            },
        },
        JobClusters = new[]
        {
            new Databricks.Inputs.JobJobClusterArgs
            {
                JobClusterKey = "string",
                NewCluster = new Databricks.Inputs.JobJobClusterNewClusterArgs
                {
                    SparkVersion = "string",
                    EnableLocalDiskEncryption = false,
                    ClusterLogConf = new Databricks.Inputs.JobJobClusterNewClusterClusterLogConfArgs
                    {
                        Dbfs = new Databricks.Inputs.JobJobClusterNewClusterClusterLogConfDbfsArgs
                        {
                            Destination = "string",
                        },
                        S3 = new Databricks.Inputs.JobJobClusterNewClusterClusterLogConfS3Args
                        {
                            Destination = "string",
                            CannedAcl = "string",
                            EnableEncryption = false,
                            EncryptionType = "string",
                            Endpoint = "string",
                            KmsKey = "string",
                            Region = "string",
                        },
                    },
                    GcpAttributes = new Databricks.Inputs.JobJobClusterNewClusterGcpAttributesArgs
                    {
                        Availability = "string",
                        BootDiskSize = 0,
                        GoogleServiceAccount = "string",
                        LocalSsdCount = 0,
                        UsePreemptibleExecutors = false,
                        ZoneId = "string",
                    },
                    ClusterId = "string",
                    IdempotencyToken = "string",
                    ClusterMountInfos = new[]
                    {
                        new Databricks.Inputs.JobJobClusterNewClusterClusterMountInfoArgs
                        {
                            LocalMountDirPath = "string",
                            NetworkFilesystemInfo = new Databricks.Inputs.JobJobClusterNewClusterClusterMountInfoNetworkFilesystemInfoArgs
                            {
                                ServerAddress = "string",
                                MountOptions = "string",
                            },
                            RemoteMountDirPath = "string",
                        },
                    },
                    ClusterName = "string",
                    CustomTags = 
                    {
                        { "string", "string" },
                    },
                    DataSecurityMode = "string",
                    DockerImage = new Databricks.Inputs.JobJobClusterNewClusterDockerImageArgs
                    {
                        Url = "string",
                        BasicAuth = new Databricks.Inputs.JobJobClusterNewClusterDockerImageBasicAuthArgs
                        {
                            Password = "string",
                            Username = "string",
                        },
                    },
                    DriverInstancePoolId = "string",
                    InitScripts = new[]
                    {
                        new Databricks.Inputs.JobJobClusterNewClusterInitScriptArgs
                        {
                            Abfss = new Databricks.Inputs.JobJobClusterNewClusterInitScriptAbfssArgs
                            {
                                Destination = "string",
                            },
                            File = new Databricks.Inputs.JobJobClusterNewClusterInitScriptFileArgs
                            {
                                Destination = "string",
                            },
                            Gcs = new Databricks.Inputs.JobJobClusterNewClusterInitScriptGcsArgs
                            {
                                Destination = "string",
                            },
                            S3 = new Databricks.Inputs.JobJobClusterNewClusterInitScriptS3Args
                            {
                                Destination = "string",
                                CannedAcl = "string",
                                EnableEncryption = false,
                                EncryptionType = "string",
                                Endpoint = "string",
                                KmsKey = "string",
                                Region = "string",
                            },
                            Volumes = new Databricks.Inputs.JobJobClusterNewClusterInitScriptVolumesArgs
                            {
                                Destination = "string",
                            },
                            Workspace = new Databricks.Inputs.JobJobClusterNewClusterInitScriptWorkspaceArgs
                            {
                                Destination = "string",
                            },
                        },
                    },
                    EnableElasticDisk = false,
                    ApplyPolicyDefaultValues = false,
                    AzureAttributes = new Databricks.Inputs.JobJobClusterNewClusterAzureAttributesArgs
                    {
                        Availability = "string",
                        FirstOnDemand = 0,
                        LogAnalyticsInfo = new Databricks.Inputs.JobJobClusterNewClusterAzureAttributesLogAnalyticsInfoArgs
                        {
                            LogAnalyticsPrimaryKey = "string",
                            LogAnalyticsWorkspaceId = "string",
                        },
                        SpotBidMaxPrice = 0,
                    },
                    AwsAttributes = new Databricks.Inputs.JobJobClusterNewClusterAwsAttributesArgs
                    {
                        Availability = "string",
                        EbsVolumeCount = 0,
                        EbsVolumeIops = 0,
                        EbsVolumeSize = 0,
                        EbsVolumeThroughput = 0,
                        EbsVolumeType = "string",
                        FirstOnDemand = 0,
                        InstanceProfileArn = "string",
                        SpotBidPricePercent = 0,
                        ZoneId = "string",
                    },
                    DriverNodeTypeId = "string",
                    InstancePoolId = "string",
                    Libraries = new[]
                    {
                        new Databricks.Inputs.JobJobClusterNewClusterLibraryArgs
                        {
                            Cran = new Databricks.Inputs.JobJobClusterNewClusterLibraryCranArgs
                            {
                                Package = "string",
                                Repo = "string",
                            },
                            Egg = "string",
                            Jar = "string",
                            Maven = new Databricks.Inputs.JobJobClusterNewClusterLibraryMavenArgs
                            {
                                Coordinates = "string",
                                Exclusions = new[]
                                {
                                    "string",
                                },
                                Repo = "string",
                            },
                            Pypi = new Databricks.Inputs.JobJobClusterNewClusterLibraryPypiArgs
                            {
                                Package = "string",
                                Repo = "string",
                            },
                            Requirements = "string",
                            Whl = "string",
                        },
                    },
                    NodeTypeId = "string",
                    NumWorkers = 0,
                    PolicyId = "string",
                    RuntimeEngine = "string",
                    SingleUserName = "string",
                    SparkConf = 
                    {
                        { "string", "string" },
                    },
                    SparkEnvVars = 
                    {
                        { "string", "string" },
                    },
                    Autoscale = new Databricks.Inputs.JobJobClusterNewClusterAutoscaleArgs
                    {
                        MaxWorkers = 0,
                        MinWorkers = 0,
                    },
                    SshPublicKeys = new[]
                    {
                        "string",
                    },
                    WorkloadType = new Databricks.Inputs.JobJobClusterNewClusterWorkloadTypeArgs
                    {
                        Clients = new Databricks.Inputs.JobJobClusterNewClusterWorkloadTypeClientsArgs
                        {
                            Jobs = false,
                            Notebooks = false,
                        },
                    },
                },
            },
        },
        Libraries = new[]
        {
            new Databricks.Inputs.JobLibraryArgs
            {
                Cran = new Databricks.Inputs.JobLibraryCranArgs
                {
                    Package = "string",
                    Repo = "string",
                },
                Egg = "string",
                Jar = "string",
                Maven = new Databricks.Inputs.JobLibraryMavenArgs
                {
                    Coordinates = "string",
                    Exclusions = new[]
                    {
                        "string",
                    },
                    Repo = "string",
                },
                Pypi = new Databricks.Inputs.JobLibraryPypiArgs
                {
                    Package = "string",
                    Repo = "string",
                },
                Requirements = "string",
                Whl = "string",
            },
        },
        MaxConcurrentRuns = 0,
        Name = "string",
        NewCluster = new Databricks.Inputs.JobNewClusterArgs
        {
            SparkVersion = "string",
            EnableLocalDiskEncryption = false,
            ClusterLogConf = new Databricks.Inputs.JobNewClusterClusterLogConfArgs
            {
                Dbfs = new Databricks.Inputs.JobNewClusterClusterLogConfDbfsArgs
                {
                    Destination = "string",
                },
                S3 = new Databricks.Inputs.JobNewClusterClusterLogConfS3Args
                {
                    Destination = "string",
                    CannedAcl = "string",
                    EnableEncryption = false,
                    EncryptionType = "string",
                    Endpoint = "string",
                    KmsKey = "string",
                    Region = "string",
                },
            },
            GcpAttributes = new Databricks.Inputs.JobNewClusterGcpAttributesArgs
            {
                Availability = "string",
                BootDiskSize = 0,
                GoogleServiceAccount = "string",
                LocalSsdCount = 0,
                UsePreemptibleExecutors = false,
                ZoneId = "string",
            },
            ClusterId = "string",
            IdempotencyToken = "string",
            ClusterMountInfos = new[]
            {
                new Databricks.Inputs.JobNewClusterClusterMountInfoArgs
                {
                    LocalMountDirPath = "string",
                    NetworkFilesystemInfo = new Databricks.Inputs.JobNewClusterClusterMountInfoNetworkFilesystemInfoArgs
                    {
                        ServerAddress = "string",
                        MountOptions = "string",
                    },
                    RemoteMountDirPath = "string",
                },
            },
            ClusterName = "string",
            CustomTags = 
            {
                { "string", "string" },
            },
            DataSecurityMode = "string",
            DockerImage = new Databricks.Inputs.JobNewClusterDockerImageArgs
            {
                Url = "string",
                BasicAuth = new Databricks.Inputs.JobNewClusterDockerImageBasicAuthArgs
                {
                    Password = "string",
                    Username = "string",
                },
            },
            DriverInstancePoolId = "string",
            InitScripts = new[]
            {
                new Databricks.Inputs.JobNewClusterInitScriptArgs
                {
                    Abfss = new Databricks.Inputs.JobNewClusterInitScriptAbfssArgs
                    {
                        Destination = "string",
                    },
                    File = new Databricks.Inputs.JobNewClusterInitScriptFileArgs
                    {
                        Destination = "string",
                    },
                    Gcs = new Databricks.Inputs.JobNewClusterInitScriptGcsArgs
                    {
                        Destination = "string",
                    },
                    S3 = new Databricks.Inputs.JobNewClusterInitScriptS3Args
                    {
                        Destination = "string",
                        CannedAcl = "string",
                        EnableEncryption = false,
                        EncryptionType = "string",
                        Endpoint = "string",
                        KmsKey = "string",
                        Region = "string",
                    },
                    Volumes = new Databricks.Inputs.JobNewClusterInitScriptVolumesArgs
                    {
                        Destination = "string",
                    },
                    Workspace = new Databricks.Inputs.JobNewClusterInitScriptWorkspaceArgs
                    {
                        Destination = "string",
                    },
                },
            },
            EnableElasticDisk = false,
            ApplyPolicyDefaultValues = false,
            AzureAttributes = new Databricks.Inputs.JobNewClusterAzureAttributesArgs
            {
                Availability = "string",
                FirstOnDemand = 0,
                LogAnalyticsInfo = new Databricks.Inputs.JobNewClusterAzureAttributesLogAnalyticsInfoArgs
                {
                    LogAnalyticsPrimaryKey = "string",
                    LogAnalyticsWorkspaceId = "string",
                },
                SpotBidMaxPrice = 0,
            },
            AwsAttributes = new Databricks.Inputs.JobNewClusterAwsAttributesArgs
            {
                Availability = "string",
                EbsVolumeCount = 0,
                EbsVolumeIops = 0,
                EbsVolumeSize = 0,
                EbsVolumeThroughput = 0,
                EbsVolumeType = "string",
                FirstOnDemand = 0,
                InstanceProfileArn = "string",
                SpotBidPricePercent = 0,
                ZoneId = "string",
            },
            DriverNodeTypeId = "string",
            InstancePoolId = "string",
            Libraries = new[]
            {
                new Databricks.Inputs.JobNewClusterLibraryArgs
                {
                    Cran = new Databricks.Inputs.JobNewClusterLibraryCranArgs
                    {
                        Package = "string",
                        Repo = "string",
                    },
                    Egg = "string",
                    Jar = "string",
                    Maven = new Databricks.Inputs.JobNewClusterLibraryMavenArgs
                    {
                        Coordinates = "string",
                        Exclusions = new[]
                        {
                            "string",
                        },
                        Repo = "string",
                    },
                    Pypi = new Databricks.Inputs.JobNewClusterLibraryPypiArgs
                    {
                        Package = "string",
                        Repo = "string",
                    },
                    Requirements = "string",
                    Whl = "string",
                },
            },
            NodeTypeId = "string",
            NumWorkers = 0,
            PolicyId = "string",
            RuntimeEngine = "string",
            SingleUserName = "string",
            SparkConf = 
            {
                { "string", "string" },
            },
            SparkEnvVars = 
            {
                { "string", "string" },
            },
            Autoscale = new Databricks.Inputs.JobNewClusterAutoscaleArgs
            {
                MaxWorkers = 0,
                MinWorkers = 0,
            },
            SshPublicKeys = new[]
            {
                "string",
            },
            WorkloadType = new Databricks.Inputs.JobNewClusterWorkloadTypeArgs
            {
                Clients = new Databricks.Inputs.JobNewClusterWorkloadTypeClientsArgs
                {
                    Jobs = false,
                    Notebooks = false,
                },
            },
        },
        NotificationSettings = new Databricks.Inputs.JobNotificationSettingsArgs
        {
            NoAlertForCanceledRuns = false,
            NoAlertForSkippedRuns = false,
        },
        Parameters = new[]
        {
            new Databricks.Inputs.JobParameterArgs
            {
                Default = "string",
                Name = "string",
            },
        },
        Queue = new Databricks.Inputs.JobQueueArgs
        {
            Enabled = false,
        },
        RunAs = new Databricks.Inputs.JobRunAsArgs
        {
            ServicePrincipalName = "string",
            UserName = "string",
        },
        Schedule = new Databricks.Inputs.JobScheduleArgs
        {
            QuartzCronExpression = "string",
            TimezoneId = "string",
            PauseStatus = "string",
        },
        Tags = 
        {
            { "string", "string" },
        },
        Tasks = new[]
        {
            new Databricks.Inputs.JobTaskArgs
            {
                TaskKey = "string",
                NewCluster = new Databricks.Inputs.JobTaskNewClusterArgs
                {
                    SparkVersion = "string",
                    EnableLocalDiskEncryption = false,
                    ClusterLogConf = new Databricks.Inputs.JobTaskNewClusterClusterLogConfArgs
                    {
                        Dbfs = new Databricks.Inputs.JobTaskNewClusterClusterLogConfDbfsArgs
                        {
                            Destination = "string",
                        },
                        S3 = new Databricks.Inputs.JobTaskNewClusterClusterLogConfS3Args
                        {
                            Destination = "string",
                            CannedAcl = "string",
                            EnableEncryption = false,
                            EncryptionType = "string",
                            Endpoint = "string",
                            KmsKey = "string",
                            Region = "string",
                        },
                    },
                    GcpAttributes = new Databricks.Inputs.JobTaskNewClusterGcpAttributesArgs
                    {
                        Availability = "string",
                        BootDiskSize = 0,
                        GoogleServiceAccount = "string",
                        LocalSsdCount = 0,
                        UsePreemptibleExecutors = false,
                        ZoneId = "string",
                    },
                    ClusterId = "string",
                    IdempotencyToken = "string",
                    ClusterMountInfos = new[]
                    {
                        new Databricks.Inputs.JobTaskNewClusterClusterMountInfoArgs
                        {
                            LocalMountDirPath = "string",
                            NetworkFilesystemInfo = new Databricks.Inputs.JobTaskNewClusterClusterMountInfoNetworkFilesystemInfoArgs
                            {
                                ServerAddress = "string",
                                MountOptions = "string",
                            },
                            RemoteMountDirPath = "string",
                        },
                    },
                    ClusterName = "string",
                    CustomTags = 
                    {
                        { "string", "string" },
                    },
                    DataSecurityMode = "string",
                    DockerImage = new Databricks.Inputs.JobTaskNewClusterDockerImageArgs
                    {
                        Url = "string",
                        BasicAuth = new Databricks.Inputs.JobTaskNewClusterDockerImageBasicAuthArgs
                        {
                            Password = "string",
                            Username = "string",
                        },
                    },
                    DriverInstancePoolId = "string",
                    InitScripts = new[]
                    {
                        new Databricks.Inputs.JobTaskNewClusterInitScriptArgs
                        {
                            Abfss = new Databricks.Inputs.JobTaskNewClusterInitScriptAbfssArgs
                            {
                                Destination = "string",
                            },
                            File = new Databricks.Inputs.JobTaskNewClusterInitScriptFileArgs
                            {
                                Destination = "string",
                            },
                            Gcs = new Databricks.Inputs.JobTaskNewClusterInitScriptGcsArgs
                            {
                                Destination = "string",
                            },
                            S3 = new Databricks.Inputs.JobTaskNewClusterInitScriptS3Args
                            {
                                Destination = "string",
                                CannedAcl = "string",
                                EnableEncryption = false,
                                EncryptionType = "string",
                                Endpoint = "string",
                                KmsKey = "string",
                                Region = "string",
                            },
                            Volumes = new Databricks.Inputs.JobTaskNewClusterInitScriptVolumesArgs
                            {
                                Destination = "string",
                            },
                            Workspace = new Databricks.Inputs.JobTaskNewClusterInitScriptWorkspaceArgs
                            {
                                Destination = "string",
                            },
                        },
                    },
                    EnableElasticDisk = false,
                    ApplyPolicyDefaultValues = false,
                    AzureAttributes = new Databricks.Inputs.JobTaskNewClusterAzureAttributesArgs
                    {
                        Availability = "string",
                        FirstOnDemand = 0,
                        LogAnalyticsInfo = new Databricks.Inputs.JobTaskNewClusterAzureAttributesLogAnalyticsInfoArgs
                        {
                            LogAnalyticsPrimaryKey = "string",
                            LogAnalyticsWorkspaceId = "string",
                        },
                        SpotBidMaxPrice = 0,
                    },
                    AwsAttributes = new Databricks.Inputs.JobTaskNewClusterAwsAttributesArgs
                    {
                        Availability = "string",
                        EbsVolumeCount = 0,
                        EbsVolumeIops = 0,
                        EbsVolumeSize = 0,
                        EbsVolumeThroughput = 0,
                        EbsVolumeType = "string",
                        FirstOnDemand = 0,
                        InstanceProfileArn = "string",
                        SpotBidPricePercent = 0,
                        ZoneId = "string",
                    },
                    DriverNodeTypeId = "string",
                    InstancePoolId = "string",
                    Libraries = new[]
                    {
                        new Databricks.Inputs.JobTaskNewClusterLibraryArgs
                        {
                            Cran = new Databricks.Inputs.JobTaskNewClusterLibraryCranArgs
                            {
                                Package = "string",
                                Repo = "string",
                            },
                            Egg = "string",
                            Jar = "string",
                            Maven = new Databricks.Inputs.JobTaskNewClusterLibraryMavenArgs
                            {
                                Coordinates = "string",
                                Exclusions = new[]
                                {
                                    "string",
                                },
                                Repo = "string",
                            },
                            Pypi = new Databricks.Inputs.JobTaskNewClusterLibraryPypiArgs
                            {
                                Package = "string",
                                Repo = "string",
                            },
                            Requirements = "string",
                            Whl = "string",
                        },
                    },
                    NodeTypeId = "string",
                    NumWorkers = 0,
                    PolicyId = "string",
                    RuntimeEngine = "string",
                    SingleUserName = "string",
                    SparkConf = 
                    {
                        { "string", "string" },
                    },
                    SparkEnvVars = 
                    {
                        { "string", "string" },
                    },
                    Autoscale = new Databricks.Inputs.JobTaskNewClusterAutoscaleArgs
                    {
                        MaxWorkers = 0,
                        MinWorkers = 0,
                    },
                    SshPublicKeys = new[]
                    {
                        "string",
                    },
                    WorkloadType = new Databricks.Inputs.JobTaskNewClusterWorkloadTypeArgs
                    {
                        Clients = new Databricks.Inputs.JobTaskNewClusterWorkloadTypeClientsArgs
                        {
                            Jobs = false,
                            Notebooks = false,
                        },
                    },
                },
                DbtTask = new Databricks.Inputs.JobTaskDbtTaskArgs
                {
                    Commands = new[]
                    {
                        "string",
                    },
                    Catalog = "string",
                    ProfilesDirectory = "string",
                    ProjectDirectory = "string",
                    Schema = "string",
                    Source = "string",
                    WarehouseId = "string",
                },
                Description = "string",
                DisableAutoOptimization = false,
                EmailNotifications = new Databricks.Inputs.JobTaskEmailNotificationsArgs
                {
                    NoAlertForSkippedRuns = false,
                    OnDurationWarningThresholdExceededs = new[]
                    {
                        "string",
                    },
                    OnFailures = new[]
                    {
                        "string",
                    },
                    OnStarts = new[]
                    {
                        "string",
                    },
                    OnStreamingBacklogExceededs = new[]
                    {
                        "string",
                    },
                    OnSuccesses = new[]
                    {
                        "string",
                    },
                },
                EnvironmentKey = "string",
                ExistingClusterId = "string",
                ForEachTask = new Databricks.Inputs.JobTaskForEachTaskArgs
                {
                    Inputs = "string",
                    Task = new Databricks.Inputs.JobTaskForEachTaskTaskArgs
                    {
                        TaskKey = "string",
                        NotebookTask = new Databricks.Inputs.JobTaskForEachTaskTaskNotebookTaskArgs
                        {
                            NotebookPath = "string",
                            BaseParameters = 
                            {
                                { "string", "string" },
                            },
                            Source = "string",
                            WarehouseId = "string",
                        },
                        WebhookNotifications = new Databricks.Inputs.JobTaskForEachTaskTaskWebhookNotificationsArgs
                        {
                            OnDurationWarningThresholdExceededs = new[]
                            {
                                new Databricks.Inputs.JobTaskForEachTaskTaskWebhookNotificationsOnDurationWarningThresholdExceededArgs
                                {
                                    Id = "string",
                                },
                            },
                            OnFailures = new[]
                            {
                                new Databricks.Inputs.JobTaskForEachTaskTaskWebhookNotificationsOnFailureArgs
                                {
                                    Id = "string",
                                },
                            },
                            OnStarts = new[]
                            {
                                new Databricks.Inputs.JobTaskForEachTaskTaskWebhookNotificationsOnStartArgs
                                {
                                    Id = "string",
                                },
                            },
                            OnStreamingBacklogExceededs = new[]
                            {
                                new Databricks.Inputs.JobTaskForEachTaskTaskWebhookNotificationsOnStreamingBacklogExceededArgs
                                {
                                    Id = "string",
                                },
                            },
                            OnSuccesses = new[]
                            {
                                new Databricks.Inputs.JobTaskForEachTaskTaskWebhookNotificationsOnSuccessArgs
                                {
                                    Id = "string",
                                },
                            },
                        },
                        NewCluster = new Databricks.Inputs.JobTaskForEachTaskTaskNewClusterArgs
                        {
                            SparkVersion = "string",
                            EnableLocalDiskEncryption = false,
                            ClusterLogConf = new Databricks.Inputs.JobTaskForEachTaskTaskNewClusterClusterLogConfArgs
                            {
                                Dbfs = new Databricks.Inputs.JobTaskForEachTaskTaskNewClusterClusterLogConfDbfsArgs
                                {
                                    Destination = "string",
                                },
                                S3 = new Databricks.Inputs.JobTaskForEachTaskTaskNewClusterClusterLogConfS3Args
                                {
                                    Destination = "string",
                                    CannedAcl = "string",
                                    EnableEncryption = false,
                                    EncryptionType = "string",
                                    Endpoint = "string",
                                    KmsKey = "string",
                                    Region = "string",
                                },
                            },
                            GcpAttributes = new Databricks.Inputs.JobTaskForEachTaskTaskNewClusterGcpAttributesArgs
                            {
                                Availability = "string",
                                BootDiskSize = 0,
                                GoogleServiceAccount = "string",
                                LocalSsdCount = 0,
                                UsePreemptibleExecutors = false,
                                ZoneId = "string",
                            },
                            ClusterId = "string",
                            IdempotencyToken = "string",
                            ClusterMountInfos = new[]
                            {
                                new Databricks.Inputs.JobTaskForEachTaskTaskNewClusterClusterMountInfoArgs
                                {
                                    LocalMountDirPath = "string",
                                    NetworkFilesystemInfo = new Databricks.Inputs.JobTaskForEachTaskTaskNewClusterClusterMountInfoNetworkFilesystemInfoArgs
                                    {
                                        ServerAddress = "string",
                                        MountOptions = "string",
                                    },
                                    RemoteMountDirPath = "string",
                                },
                            },
                            ClusterName = "string",
                            CustomTags = 
                            {
                                { "string", "string" },
                            },
                            DataSecurityMode = "string",
                            DockerImage = new Databricks.Inputs.JobTaskForEachTaskTaskNewClusterDockerImageArgs
                            {
                                Url = "string",
                                BasicAuth = new Databricks.Inputs.JobTaskForEachTaskTaskNewClusterDockerImageBasicAuthArgs
                                {
                                    Password = "string",
                                    Username = "string",
                                },
                            },
                            DriverInstancePoolId = "string",
                            InitScripts = new[]
                            {
                                new Databricks.Inputs.JobTaskForEachTaskTaskNewClusterInitScriptArgs
                                {
                                    Abfss = new Databricks.Inputs.JobTaskForEachTaskTaskNewClusterInitScriptAbfssArgs
                                    {
                                        Destination = "string",
                                    },
                                    File = new Databricks.Inputs.JobTaskForEachTaskTaskNewClusterInitScriptFileArgs
                                    {
                                        Destination = "string",
                                    },
                                    Gcs = new Databricks.Inputs.JobTaskForEachTaskTaskNewClusterInitScriptGcsArgs
                                    {
                                        Destination = "string",
                                    },
                                    S3 = new Databricks.Inputs.JobTaskForEachTaskTaskNewClusterInitScriptS3Args
                                    {
                                        Destination = "string",
                                        CannedAcl = "string",
                                        EnableEncryption = false,
                                        EncryptionType = "string",
                                        Endpoint = "string",
                                        KmsKey = "string",
                                        Region = "string",
                                    },
                                    Volumes = new Databricks.Inputs.JobTaskForEachTaskTaskNewClusterInitScriptVolumesArgs
                                    {
                                        Destination = "string",
                                    },
                                    Workspace = new Databricks.Inputs.JobTaskForEachTaskTaskNewClusterInitScriptWorkspaceArgs
                                    {
                                        Destination = "string",
                                    },
                                },
                            },
                            EnableElasticDisk = false,
                            ApplyPolicyDefaultValues = false,
                            AzureAttributes = new Databricks.Inputs.JobTaskForEachTaskTaskNewClusterAzureAttributesArgs
                            {
                                Availability = "string",
                                FirstOnDemand = 0,
                                LogAnalyticsInfo = new Databricks.Inputs.JobTaskForEachTaskTaskNewClusterAzureAttributesLogAnalyticsInfoArgs
                                {
                                    LogAnalyticsPrimaryKey = "string",
                                    LogAnalyticsWorkspaceId = "string",
                                },
                                SpotBidMaxPrice = 0,
                            },
                            AwsAttributes = new Databricks.Inputs.JobTaskForEachTaskTaskNewClusterAwsAttributesArgs
                            {
                                Availability = "string",
                                EbsVolumeCount = 0,
                                EbsVolumeIops = 0,
                                EbsVolumeSize = 0,
                                EbsVolumeThroughput = 0,
                                EbsVolumeType = "string",
                                FirstOnDemand = 0,
                                InstanceProfileArn = "string",
                                SpotBidPricePercent = 0,
                                ZoneId = "string",
                            },
                            DriverNodeTypeId = "string",
                            InstancePoolId = "string",
                            Libraries = new[]
                            {
                                new Databricks.Inputs.JobTaskForEachTaskTaskNewClusterLibraryArgs
                                {
                                    Cran = new Databricks.Inputs.JobTaskForEachTaskTaskNewClusterLibraryCranArgs
                                    {
                                        Package = "string",
                                        Repo = "string",
                                    },
                                    Egg = "string",
                                    Jar = "string",
                                    Maven = new Databricks.Inputs.JobTaskForEachTaskTaskNewClusterLibraryMavenArgs
                                    {
                                        Coordinates = "string",
                                        Exclusions = new[]
                                        {
                                            "string",
                                        },
                                        Repo = "string",
                                    },
                                    Pypi = new Databricks.Inputs.JobTaskForEachTaskTaskNewClusterLibraryPypiArgs
                                    {
                                        Package = "string",
                                        Repo = "string",
                                    },
                                    Requirements = "string",
                                    Whl = "string",
                                },
                            },
                            NodeTypeId = "string",
                            NumWorkers = 0,
                            PolicyId = "string",
                            RuntimeEngine = "string",
                            SingleUserName = "string",
                            SparkConf = 
                            {
                                { "string", "string" },
                            },
                            SparkEnvVars = 
                            {
                                { "string", "string" },
                            },
                            Autoscale = new Databricks.Inputs.JobTaskForEachTaskTaskNewClusterAutoscaleArgs
                            {
                                MaxWorkers = 0,
                                MinWorkers = 0,
                            },
                            SshPublicKeys = new[]
                            {
                                "string",
                            },
                            WorkloadType = new Databricks.Inputs.JobTaskForEachTaskTaskNewClusterWorkloadTypeArgs
                            {
                                Clients = new Databricks.Inputs.JobTaskForEachTaskTaskNewClusterWorkloadTypeClientsArgs
                                {
                                    Jobs = false,
                                    Notebooks = false,
                                },
                            },
                        },
                        DisableAutoOptimization = false,
                        EmailNotifications = new Databricks.Inputs.JobTaskForEachTaskTaskEmailNotificationsArgs
                        {
                            NoAlertForSkippedRuns = false,
                            OnDurationWarningThresholdExceededs = new[]
                            {
                                "string",
                            },
                            OnFailures = new[]
                            {
                                "string",
                            },
                            OnStarts = new[]
                            {
                                "string",
                            },
                            OnStreamingBacklogExceededs = new[]
                            {
                                "string",
                            },
                            OnSuccesses = new[]
                            {
                                "string",
                            },
                        },
                        EnvironmentKey = "string",
                        ExistingClusterId = "string",
                        Health = new Databricks.Inputs.JobTaskForEachTaskTaskHealthArgs
                        {
                            Rules = new[]
                            {
                                new Databricks.Inputs.JobTaskForEachTaskTaskHealthRuleArgs
                                {
                                    Metric = "string",
                                    Op = "string",
                                    Value = 0,
                                },
                            },
                        },
                        JobClusterKey = "string",
                        Libraries = new[]
                        {
                            new Databricks.Inputs.JobTaskForEachTaskTaskLibraryArgs
                            {
                                Cran = new Databricks.Inputs.JobTaskForEachTaskTaskLibraryCranArgs
                                {
                                    Package = "string",
                                    Repo = "string",
                                },
                                Egg = "string",
                                Jar = "string",
                                Maven = new Databricks.Inputs.JobTaskForEachTaskTaskLibraryMavenArgs
                                {
                                    Coordinates = "string",
                                    Exclusions = new[]
                                    {
                                        "string",
                                    },
                                    Repo = "string",
                                },
                                Pypi = new Databricks.Inputs.JobTaskForEachTaskTaskLibraryPypiArgs
                                {
                                    Package = "string",
                                    Repo = "string",
                                },
                                Requirements = "string",
                                Whl = "string",
                            },
                        },
                        MaxRetries = 0,
                        MinRetryIntervalMillis = 0,
                        Description = "string",
                        DependsOns = new[]
                        {
                            new Databricks.Inputs.JobTaskForEachTaskTaskDependsOnArgs
                            {
                                TaskKey = "string",
                                Outcome = "string",
                            },
                        },
                        SparkPythonTask = new Databricks.Inputs.JobTaskForEachTaskTaskSparkPythonTaskArgs
                        {
                            PythonFile = "string",
                            Parameters = new[]
                            {
                                "string",
                            },
                            Source = "string",
                        },
                        PipelineTask = new Databricks.Inputs.JobTaskForEachTaskTaskPipelineTaskArgs
                        {
                            PipelineId = "string",
                            FullRefresh = false,
                        },
                        PythonWheelTask = new Databricks.Inputs.JobTaskForEachTaskTaskPythonWheelTaskArgs
                        {
                            EntryPoint = "string",
                            NamedParameters = 
                            {
                                { "string", "string" },
                            },
                            PackageName = "string",
                            Parameters = new[]
                            {
                                "string",
                            },
                        },
                        RetryOnTimeout = false,
                        RunIf = "string",
                        RunJobTask = new Databricks.Inputs.JobTaskForEachTaskTaskRunJobTaskArgs
                        {
                            JobId = 0,
                            DbtCommands = new[]
                            {
                                "string",
                            },
                            JarParams = new[]
                            {
                                "string",
                            },
                            JobParameters = 
                            {
                                { "string", "string" },
                            },
                            NotebookParams = 
                            {
                                { "string", "string" },
                            },
                            PipelineParams = new Databricks.Inputs.JobTaskForEachTaskTaskRunJobTaskPipelineParamsArgs
                            {
                                FullRefresh = false,
                            },
                            PythonNamedParams = 
                            {
                                { "string", "string" },
                            },
                            PythonParams = new[]
                            {
                                "string",
                            },
                            SparkSubmitParams = new[]
                            {
                                "string",
                            },
                            SqlParams = 
                            {
                                { "string", "string" },
                            },
                        },
                        SparkJarTask = new Databricks.Inputs.JobTaskForEachTaskTaskSparkJarTaskArgs
                        {
                            JarUri = "string",
                            MainClassName = "string",
                            Parameters = new[]
                            {
                                "string",
                            },
                        },
                        NotificationSettings = new Databricks.Inputs.JobTaskForEachTaskTaskNotificationSettingsArgs
                        {
                            AlertOnLastAttempt = false,
                            NoAlertForCanceledRuns = false,
                            NoAlertForSkippedRuns = false,
                        },
                        SparkSubmitTask = new Databricks.Inputs.JobTaskForEachTaskTaskSparkSubmitTaskArgs
                        {
                            Parameters = new[]
                            {
                                "string",
                            },
                        },
                        SqlTask = new Databricks.Inputs.JobTaskForEachTaskTaskSqlTaskArgs
                        {
                            WarehouseId = "string",
                            Alert = new Databricks.Inputs.JobTaskForEachTaskTaskSqlTaskAlertArgs
                            {
                                AlertId = "string",
                                Subscriptions = new[]
                                {
                                    new Databricks.Inputs.JobTaskForEachTaskTaskSqlTaskAlertSubscriptionArgs
                                    {
                                        DestinationId = "string",
                                        UserName = "string",
                                    },
                                },
                                PauseSubscriptions = false,
                            },
                            Dashboard = new Databricks.Inputs.JobTaskForEachTaskTaskSqlTaskDashboardArgs
                            {
                                DashboardId = "string",
                                CustomSubject = "string",
                                PauseSubscriptions = false,
                                Subscriptions = new[]
                                {
                                    new Databricks.Inputs.JobTaskForEachTaskTaskSqlTaskDashboardSubscriptionArgs
                                    {
                                        DestinationId = "string",
                                        UserName = "string",
                                    },
                                },
                            },
                            File = new Databricks.Inputs.JobTaskForEachTaskTaskSqlTaskFileArgs
                            {
                                Path = "string",
                                Source = "string",
                            },
                            Parameters = 
                            {
                                { "string", "string" },
                            },
                            Query = new Databricks.Inputs.JobTaskForEachTaskTaskSqlTaskQueryArgs
                            {
                                QueryId = "string",
                            },
                        },
                        DbtTask = new Databricks.Inputs.JobTaskForEachTaskTaskDbtTaskArgs
                        {
                            Commands = new[]
                            {
                                "string",
                            },
                            Catalog = "string",
                            ProfilesDirectory = "string",
                            ProjectDirectory = "string",
                            Schema = "string",
                            Source = "string",
                            WarehouseId = "string",
                        },
                        TimeoutSeconds = 0,
                        ConditionTask = new Databricks.Inputs.JobTaskForEachTaskTaskConditionTaskArgs
                        {
                            Left = "string",
                            Op = "string",
                            Right = "string",
                        },
                    },
                    Concurrency = 0,
                },
                Health = new Databricks.Inputs.JobTaskHealthArgs
                {
                    Rules = new[]
                    {
                        new Databricks.Inputs.JobTaskHealthRuleArgs
                        {
                            Metric = "string",
                            Op = "string",
                            Value = 0,
                        },
                    },
                },
                JobClusterKey = "string",
                Libraries = new[]
                {
                    new Databricks.Inputs.JobTaskLibraryArgs
                    {
                        Cran = new Databricks.Inputs.JobTaskLibraryCranArgs
                        {
                            Package = "string",
                            Repo = "string",
                        },
                        Egg = "string",
                        Jar = "string",
                        Maven = new Databricks.Inputs.JobTaskLibraryMavenArgs
                        {
                            Coordinates = "string",
                            Exclusions = new[]
                            {
                                "string",
                            },
                            Repo = "string",
                        },
                        Pypi = new Databricks.Inputs.JobTaskLibraryPypiArgs
                        {
                            Package = "string",
                            Repo = "string",
                        },
                        Requirements = "string",
                        Whl = "string",
                    },
                },
                MaxRetries = 0,
                WebhookNotifications = new Databricks.Inputs.JobTaskWebhookNotificationsArgs
                {
                    OnDurationWarningThresholdExceededs = new[]
                    {
                        new Databricks.Inputs.JobTaskWebhookNotificationsOnDurationWarningThresholdExceededArgs
                        {
                            Id = "string",
                        },
                    },
                    OnFailures = new[]
                    {
                        new Databricks.Inputs.JobTaskWebhookNotificationsOnFailureArgs
                        {
                            Id = "string",
                        },
                    },
                    OnStarts = new[]
                    {
                        new Databricks.Inputs.JobTaskWebhookNotificationsOnStartArgs
                        {
                            Id = "string",
                        },
                    },
                    OnStreamingBacklogExceededs = new[]
                    {
                        new Databricks.Inputs.JobTaskWebhookNotificationsOnStreamingBacklogExceededArgs
                        {
                            Id = "string",
                        },
                    },
                    OnSuccesses = new[]
                    {
                        new Databricks.Inputs.JobTaskWebhookNotificationsOnSuccessArgs
                        {
                            Id = "string",
                        },
                    },
                },
                DependsOns = new[]
                {
                    new Databricks.Inputs.JobTaskDependsOnArgs
                    {
                        TaskKey = "string",
                        Outcome = "string",
                    },
                },
                RetryOnTimeout = false,
                NotificationSettings = new Databricks.Inputs.JobTaskNotificationSettingsArgs
                {
                    AlertOnLastAttempt = false,
                    NoAlertForCanceledRuns = false,
                    NoAlertForSkippedRuns = false,
                },
                PipelineTask = new Databricks.Inputs.JobTaskPipelineTaskArgs
                {
                    PipelineId = "string",
                    FullRefresh = false,
                },
                PythonWheelTask = new Databricks.Inputs.JobTaskPythonWheelTaskArgs
                {
                    EntryPoint = "string",
                    NamedParameters = 
                    {
                        { "string", "string" },
                    },
                    PackageName = "string",
                    Parameters = new[]
                    {
                        "string",
                    },
                },
                NotebookTask = new Databricks.Inputs.JobTaskNotebookTaskArgs
                {
                    NotebookPath = "string",
                    BaseParameters = 
                    {
                        { "string", "string" },
                    },
                    Source = "string",
                    WarehouseId = "string",
                },
                RunIf = "string",
                RunJobTask = new Databricks.Inputs.JobTaskRunJobTaskArgs
                {
                    JobId = 0,
                    DbtCommands = new[]
                    {
                        "string",
                    },
                    JarParams = new[]
                    {
                        "string",
                    },
                    JobParameters = 
                    {
                        { "string", "string" },
                    },
                    NotebookParams = 
                    {
                        { "string", "string" },
                    },
                    PipelineParams = new Databricks.Inputs.JobTaskRunJobTaskPipelineParamsArgs
                    {
                        FullRefresh = false,
                    },
                    PythonNamedParams = 
                    {
                        { "string", "string" },
                    },
                    PythonParams = new[]
                    {
                        "string",
                    },
                    SparkSubmitParams = new[]
                    {
                        "string",
                    },
                    SqlParams = 
                    {
                        { "string", "string" },
                    },
                },
                SparkJarTask = new Databricks.Inputs.JobTaskSparkJarTaskArgs
                {
                    JarUri = "string",
                    MainClassName = "string",
                    Parameters = new[]
                    {
                        "string",
                    },
                },
                SparkPythonTask = new Databricks.Inputs.JobTaskSparkPythonTaskArgs
                {
                    PythonFile = "string",
                    Parameters = new[]
                    {
                        "string",
                    },
                    Source = "string",
                },
                SparkSubmitTask = new Databricks.Inputs.JobTaskSparkSubmitTaskArgs
                {
                    Parameters = new[]
                    {
                        "string",
                    },
                },
                SqlTask = new Databricks.Inputs.JobTaskSqlTaskArgs
                {
                    WarehouseId = "string",
                    Alert = new Databricks.Inputs.JobTaskSqlTaskAlertArgs
                    {
                        AlertId = "string",
                        Subscriptions = new[]
                        {
                            new Databricks.Inputs.JobTaskSqlTaskAlertSubscriptionArgs
                            {
                                DestinationId = "string",
                                UserName = "string",
                            },
                        },
                        PauseSubscriptions = false,
                    },
                    Dashboard = new Databricks.Inputs.JobTaskSqlTaskDashboardArgs
                    {
                        DashboardId = "string",
                        CustomSubject = "string",
                        PauseSubscriptions = false,
                        Subscriptions = new[]
                        {
                            new Databricks.Inputs.JobTaskSqlTaskDashboardSubscriptionArgs
                            {
                                DestinationId = "string",
                                UserName = "string",
                            },
                        },
                    },
                    File = new Databricks.Inputs.JobTaskSqlTaskFileArgs
                    {
                        Path = "string",
                        Source = "string",
                    },
                    Parameters = 
                    {
                        { "string", "string" },
                    },
                    Query = new Databricks.Inputs.JobTaskSqlTaskQueryArgs
                    {
                        QueryId = "string",
                    },
                },
                ConditionTask = new Databricks.Inputs.JobTaskConditionTaskArgs
                {
                    Left = "string",
                    Op = "string",
                    Right = "string",
                },
                TimeoutSeconds = 0,
                MinRetryIntervalMillis = 0,
            },
        },
        TimeoutSeconds = 0,
        Trigger = new Databricks.Inputs.JobTriggerArgs
        {
            FileArrival = new Databricks.Inputs.JobTriggerFileArrivalArgs
            {
                Url = "string",
                MinTimeBetweenTriggersSeconds = 0,
                WaitAfterLastChangeSeconds = 0,
            },
            PauseStatus = "string",
            Periodic = new Databricks.Inputs.JobTriggerPeriodicArgs
            {
                Interval = 0,
                Unit = "string",
            },
            Table = new Databricks.Inputs.JobTriggerTableArgs
            {
                Condition = "string",
                MinTimeBetweenTriggersSeconds = 0,
                TableNames = new[]
                {
                    "string",
                },
                WaitAfterLastChangeSeconds = 0,
            },
            TableUpdate = new Databricks.Inputs.JobTriggerTableUpdateArgs
            {
                TableNames = new[]
                {
                    "string",
                },
                Condition = "string",
                MinTimeBetweenTriggersSeconds = 0,
                WaitAfterLastChangeSeconds = 0,
            },
        },
        WebhookNotifications = new Databricks.Inputs.JobWebhookNotificationsArgs
        {
            OnDurationWarningThresholdExceededs = new[]
            {
                new Databricks.Inputs.JobWebhookNotificationsOnDurationWarningThresholdExceededArgs
                {
                    Id = "string",
                },
            },
            OnFailures = new[]
            {
                new Databricks.Inputs.JobWebhookNotificationsOnFailureArgs
                {
                    Id = "string",
                },
            },
            OnStarts = new[]
            {
                new Databricks.Inputs.JobWebhookNotificationsOnStartArgs
                {
                    Id = "string",
                },
            },
            OnStreamingBacklogExceededs = new[]
            {
                new Databricks.Inputs.JobWebhookNotificationsOnStreamingBacklogExceededArgs
                {
                    Id = "string",
                },
            },
            OnSuccesses = new[]
            {
                new Databricks.Inputs.JobWebhookNotificationsOnSuccessArgs
                {
                    Id = "string",
                },
            },
        },
    });
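The exhaustive listing above enumerates every supported input with placeholder values; a real job definition sets only the handful of arguments it needs. Below is a minimal sketch in C#, assuming a hypothetical existing cluster ID and workspace notebook path (substitute values from your own workspace):

    using Pulumi;
    using Databricks = Pulumi.Databricks;

    return await Deployment.RunAsync(() =>
    {
        // A single-task job that runs a notebook on an existing cluster
        // every night at 02:00 UTC.
        var nightly = new Databricks.Job("nightly", new()
        {
            Name = "Nightly ETL",
            Tasks = new[]
            {
                new Databricks.Inputs.JobTaskArgs
                {
                    TaskKey = "etl",
                    ExistingClusterId = "0123-456789-abcdef00", // hypothetical cluster ID
                    NotebookTask = new Databricks.Inputs.JobTaskNotebookTaskArgs
                    {
                        NotebookPath = "/Workspace/etl/nightly", // hypothetical notebook path
                    },
                },
            },
            Schedule = new Databricks.Inputs.JobScheduleArgs
            {
                QuartzCronExpression = "0 0 2 * * ?",
                TimezoneId = "UTC",
            },
        });
    });

The same shape applies in every language: pick one of the `*Task` blocks per task, attach a cluster via `ExistingClusterId`, `NewCluster`, or `JobClusterKey`, and leave the remaining inputs at their defaults.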
    
    example, err := databricks.NewJob(ctx, "jobResource", &databricks.JobArgs{
    	Continuous: &databricks.JobContinuousArgs{
    		PauseStatus: pulumi.String("string"),
    	},
    	ControlRunState: pulumi.Bool(false),
    	Deployment: &databricks.JobDeploymentArgs{
    		Kind:             pulumi.String("string"),
    		MetadataFilePath: pulumi.String("string"),
    	},
    	Description: pulumi.String("string"),
    	EditMode:    pulumi.String("string"),
    	EmailNotifications: &databricks.JobEmailNotificationsArgs{
    		NoAlertForSkippedRuns: pulumi.Bool(false),
    		OnDurationWarningThresholdExceededs: pulumi.StringArray{
    			pulumi.String("string"),
    		},
    		OnFailures: pulumi.StringArray{
    			pulumi.String("string"),
    		},
    		OnStarts: pulumi.StringArray{
    			pulumi.String("string"),
    		},
    		OnStreamingBacklogExceededs: pulumi.StringArray{
    			pulumi.String("string"),
    		},
    		OnSuccesses: pulumi.StringArray{
    			pulumi.String("string"),
    		},
    	},
    	Environments: databricks.JobEnvironmentArray{
    		&databricks.JobEnvironmentArgs{
    			EnvironmentKey: pulumi.String("string"),
    			Spec: &databricks.JobEnvironmentSpecArgs{
    				Client: pulumi.String("string"),
    				Dependencies: pulumi.StringArray{
    					pulumi.String("string"),
    				},
    			},
    		},
    	},
    	ExistingClusterId: pulumi.String("string"),
    	Format:            pulumi.String("string"),
    	GitSource: &databricks.JobGitSourceArgs{
    		Url:    pulumi.String("string"),
    		Branch: pulumi.String("string"),
    		Commit: pulumi.String("string"),
    		GitSnapshot: &databricks.JobGitSourceGitSnapshotArgs{
    			UsedCommit: pulumi.String("string"),
    		},
    		JobSource: &databricks.JobGitSourceJobSourceArgs{
    			ImportFromGitBranch: pulumi.String("string"),
    			JobConfigPath:       pulumi.String("string"),
    			DirtyState:          pulumi.String("string"),
    		},
    		Provider: pulumi.String("string"),
    		Tag:      pulumi.String("string"),
    	},
    	Health: &databricks.JobHealthArgs{
    		Rules: databricks.JobHealthRuleArray{
    			&databricks.JobHealthRuleArgs{
    				Metric: pulumi.String("string"),
    				Op:     pulumi.String("string"),
    				Value:  pulumi.Int(0),
    			},
    		},
    	},
    	JobClusters: databricks.JobJobClusterArray{
    		&databricks.JobJobClusterArgs{
    			JobClusterKey: pulumi.String("string"),
    			NewCluster: &databricks.JobJobClusterNewClusterArgs{
    				SparkVersion:              pulumi.String("string"),
    				EnableLocalDiskEncryption: pulumi.Bool(false),
    				ClusterLogConf: &databricks.JobJobClusterNewClusterClusterLogConfArgs{
    					Dbfs: &databricks.JobJobClusterNewClusterClusterLogConfDbfsArgs{
    						Destination: pulumi.String("string"),
    					},
    					S3: &databricks.JobJobClusterNewClusterClusterLogConfS3Args{
    						Destination:      pulumi.String("string"),
    						CannedAcl:        pulumi.String("string"),
    						EnableEncryption: pulumi.Bool(false),
    						EncryptionType:   pulumi.String("string"),
    						Endpoint:         pulumi.String("string"),
    						KmsKey:           pulumi.String("string"),
    						Region:           pulumi.String("string"),
    					},
    				},
    				GcpAttributes: &databricks.JobJobClusterNewClusterGcpAttributesArgs{
    					Availability:            pulumi.String("string"),
    					BootDiskSize:            pulumi.Int(0),
    					GoogleServiceAccount:    pulumi.String("string"),
    					LocalSsdCount:           pulumi.Int(0),
    					UsePreemptibleExecutors: pulumi.Bool(false),
    					ZoneId:                  pulumi.String("string"),
    				},
    				ClusterId:        pulumi.String("string"),
    				IdempotencyToken: pulumi.String("string"),
    				ClusterMountInfos: databricks.JobJobClusterNewClusterClusterMountInfoArray{
    					&databricks.JobJobClusterNewClusterClusterMountInfoArgs{
    						LocalMountDirPath: pulumi.String("string"),
    						NetworkFilesystemInfo: &databricks.JobJobClusterNewClusterClusterMountInfoNetworkFilesystemInfoArgs{
    							ServerAddress: pulumi.String("string"),
    							MountOptions:  pulumi.String("string"),
    						},
    						RemoteMountDirPath: pulumi.String("string"),
    					},
    				},
    				ClusterName: pulumi.String("string"),
    				CustomTags: pulumi.StringMap{
    					"string": pulumi.String("string"),
    				},
    				DataSecurityMode: pulumi.String("string"),
    				DockerImage: &databricks.JobJobClusterNewClusterDockerImageArgs{
    					Url: pulumi.String("string"),
    					BasicAuth: &databricks.JobJobClusterNewClusterDockerImageBasicAuthArgs{
    						Password: pulumi.String("string"),
    						Username: pulumi.String("string"),
    					},
    				},
    				DriverInstancePoolId: pulumi.String("string"),
    				InitScripts: databricks.JobJobClusterNewClusterInitScriptArray{
    					&databricks.JobJobClusterNewClusterInitScriptArgs{
    						Abfss: &databricks.JobJobClusterNewClusterInitScriptAbfssArgs{
    							Destination: pulumi.String("string"),
    						},
    						File: &databricks.JobJobClusterNewClusterInitScriptFileArgs{
    							Destination: pulumi.String("string"),
    						},
    						Gcs: &databricks.JobJobClusterNewClusterInitScriptGcsArgs{
    							Destination: pulumi.String("string"),
    						},
    						S3: &databricks.JobJobClusterNewClusterInitScriptS3Args{
    							Destination:      pulumi.String("string"),
    							CannedAcl:        pulumi.String("string"),
    							EnableEncryption: pulumi.Bool(false),
    							EncryptionType:   pulumi.String("string"),
    							Endpoint:         pulumi.String("string"),
    							KmsKey:           pulumi.String("string"),
    							Region:           pulumi.String("string"),
    						},
    						Volumes: &databricks.JobJobClusterNewClusterInitScriptVolumesArgs{
    							Destination: pulumi.String("string"),
    						},
    						Workspace: &databricks.JobJobClusterNewClusterInitScriptWorkspaceArgs{
    							Destination: pulumi.String("string"),
    						},
    					},
    				},
    				EnableElasticDisk:        pulumi.Bool(false),
    				ApplyPolicyDefaultValues: pulumi.Bool(false),
    				AzureAttributes: &databricks.JobJobClusterNewClusterAzureAttributesArgs{
    					Availability:  pulumi.String("string"),
    					FirstOnDemand: pulumi.Int(0),
    					LogAnalyticsInfo: &databricks.JobJobClusterNewClusterAzureAttributesLogAnalyticsInfoArgs{
    						LogAnalyticsPrimaryKey:  pulumi.String("string"),
    						LogAnalyticsWorkspaceId: pulumi.String("string"),
    					},
    					SpotBidMaxPrice: pulumi.Float64(0),
    				},
    				AwsAttributes: &databricks.JobJobClusterNewClusterAwsAttributesArgs{
    					Availability:        pulumi.String("string"),
    					EbsVolumeCount:      pulumi.Int(0),
    					EbsVolumeIops:       pulumi.Int(0),
    					EbsVolumeSize:       pulumi.Int(0),
    					EbsVolumeThroughput: pulumi.Int(0),
    					EbsVolumeType:       pulumi.String("string"),
    					FirstOnDemand:       pulumi.Int(0),
    					InstanceProfileArn:  pulumi.String("string"),
    					SpotBidPricePercent: pulumi.Int(0),
    					ZoneId:              pulumi.String("string"),
    				},
    				DriverNodeTypeId: pulumi.String("string"),
    				InstancePoolId:   pulumi.String("string"),
    				Libraries: databricks.JobJobClusterNewClusterLibraryArray{
    					&databricks.JobJobClusterNewClusterLibraryArgs{
    						Cran: &databricks.JobJobClusterNewClusterLibraryCranArgs{
    							Package: pulumi.String("string"),
    							Repo:    pulumi.String("string"),
    						},
    						Egg: pulumi.String("string"),
    						Jar: pulumi.String("string"),
    						Maven: &databricks.JobJobClusterNewClusterLibraryMavenArgs{
    							Coordinates: pulumi.String("string"),
    							Exclusions: pulumi.StringArray{
    								pulumi.String("string"),
    							},
    							Repo: pulumi.String("string"),
    						},
    						Pypi: &databricks.JobJobClusterNewClusterLibraryPypiArgs{
    							Package: pulumi.String("string"),
    							Repo:    pulumi.String("string"),
    						},
    						Requirements: pulumi.String("string"),
    						Whl:          pulumi.String("string"),
    					},
    				},
    				NodeTypeId:     pulumi.String("string"),
    				NumWorkers:     pulumi.Int(0),
    				PolicyId:       pulumi.String("string"),
    				RuntimeEngine:  pulumi.String("string"),
    				SingleUserName: pulumi.String("string"),
    				SparkConf: pulumi.StringMap{
    					"string": pulumi.String("string"),
    				},
    				SparkEnvVars: pulumi.StringMap{
    					"string": pulumi.String("string"),
    				},
    				Autoscale: &databricks.JobJobClusterNewClusterAutoscaleArgs{
    					MaxWorkers: pulumi.Int(0),
    					MinWorkers: pulumi.Int(0),
    				},
    				SshPublicKeys: pulumi.StringArray{
    					pulumi.String("string"),
    				},
    				WorkloadType: &databricks.JobJobClusterNewClusterWorkloadTypeArgs{
    					Clients: &databricks.JobJobClusterNewClusterWorkloadTypeClientsArgs{
    						Jobs:      pulumi.Bool(false),
    						Notebooks: pulumi.Bool(false),
    					},
    				},
    			},
    		},
    	},
    	Libraries: databricks.JobLibraryArray{
    		&databricks.JobLibraryArgs{
    			Cran: &databricks.JobLibraryCranArgs{
    				Package: pulumi.String("string"),
    				Repo:    pulumi.String("string"),
    			},
    			Egg: pulumi.String("string"),
    			Jar: pulumi.String("string"),
    			Maven: &databricks.JobLibraryMavenArgs{
    				Coordinates: pulumi.String("string"),
    				Exclusions: pulumi.StringArray{
    					pulumi.String("string"),
    				},
    				Repo: pulumi.String("string"),
    			},
    			Pypi: &databricks.JobLibraryPypiArgs{
    				Package: pulumi.String("string"),
    				Repo:    pulumi.String("string"),
    			},
    			Requirements: pulumi.String("string"),
    			Whl:          pulumi.String("string"),
    		},
    	},
    	MaxConcurrentRuns: pulumi.Int(0),
    	Name:              pulumi.String("string"),
    	NewCluster: &databricks.JobNewClusterArgs{
    		SparkVersion:              pulumi.String("string"),
    		EnableLocalDiskEncryption: pulumi.Bool(false),
    		ClusterLogConf: &databricks.JobNewClusterClusterLogConfArgs{
    			Dbfs: &databricks.JobNewClusterClusterLogConfDbfsArgs{
    				Destination: pulumi.String("string"),
    			},
    			S3: &databricks.JobNewClusterClusterLogConfS3Args{
    				Destination:      pulumi.String("string"),
    				CannedAcl:        pulumi.String("string"),
    				EnableEncryption: pulumi.Bool(false),
    				EncryptionType:   pulumi.String("string"),
    				Endpoint:         pulumi.String("string"),
    				KmsKey:           pulumi.String("string"),
    				Region:           pulumi.String("string"),
    			},
    		},
    		GcpAttributes: &databricks.JobNewClusterGcpAttributesArgs{
    			Availability:            pulumi.String("string"),
    			BootDiskSize:            pulumi.Int(0),
    			GoogleServiceAccount:    pulumi.String("string"),
    			LocalSsdCount:           pulumi.Int(0),
    			UsePreemptibleExecutors: pulumi.Bool(false),
    			ZoneId:                  pulumi.String("string"),
    		},
    		ClusterId:        pulumi.String("string"),
    		IdempotencyToken: pulumi.String("string"),
    		ClusterMountInfos: databricks.JobNewClusterClusterMountInfoArray{
    			&databricks.JobNewClusterClusterMountInfoArgs{
    				LocalMountDirPath: pulumi.String("string"),
    				NetworkFilesystemInfo: &databricks.JobNewClusterClusterMountInfoNetworkFilesystemInfoArgs{
    					ServerAddress: pulumi.String("string"),
    					MountOptions:  pulumi.String("string"),
    				},
    				RemoteMountDirPath: pulumi.String("string"),
    			},
    		},
    		ClusterName: pulumi.String("string"),
    		CustomTags: pulumi.StringMap{
    			"string": pulumi.String("string"),
    		},
    		DataSecurityMode: pulumi.String("string"),
    		DockerImage: &databricks.JobNewClusterDockerImageArgs{
    			Url: pulumi.String("string"),
    			BasicAuth: &databricks.JobNewClusterDockerImageBasicAuthArgs{
    				Password: pulumi.String("string"),
    				Username: pulumi.String("string"),
    			},
    		},
    		DriverInstancePoolId: pulumi.String("string"),
    		InitScripts: databricks.JobNewClusterInitScriptArray{
    			&databricks.JobNewClusterInitScriptArgs{
    				Abfss: &databricks.JobNewClusterInitScriptAbfssArgs{
    					Destination: pulumi.String("string"),
    				},
    				File: &databricks.JobNewClusterInitScriptFileArgs{
    					Destination: pulumi.String("string"),
    				},
    				Gcs: &databricks.JobNewClusterInitScriptGcsArgs{
    					Destination: pulumi.String("string"),
    				},
    				S3: &databricks.JobNewClusterInitScriptS3Args{
    					Destination:      pulumi.String("string"),
    					CannedAcl:        pulumi.String("string"),
    					EnableEncryption: pulumi.Bool(false),
    					EncryptionType:   pulumi.String("string"),
    					Endpoint:         pulumi.String("string"),
    					KmsKey:           pulumi.String("string"),
    					Region:           pulumi.String("string"),
    				},
    				Volumes: &databricks.JobNewClusterInitScriptVolumesArgs{
    					Destination: pulumi.String("string"),
    				},
    				Workspace: &databricks.JobNewClusterInitScriptWorkspaceArgs{
    					Destination: pulumi.String("string"),
    				},
    			},
    		},
    		EnableElasticDisk:        pulumi.Bool(false),
    		ApplyPolicyDefaultValues: pulumi.Bool(false),
    		AzureAttributes: &databricks.JobNewClusterAzureAttributesArgs{
    			Availability:  pulumi.String("string"),
    			FirstOnDemand: pulumi.Int(0),
    			LogAnalyticsInfo: &databricks.JobNewClusterAzureAttributesLogAnalyticsInfoArgs{
    				LogAnalyticsPrimaryKey:  pulumi.String("string"),
    				LogAnalyticsWorkspaceId: pulumi.String("string"),
    			},
    			SpotBidMaxPrice: pulumi.Float64(0),
    		},
    		AwsAttributes: &databricks.JobNewClusterAwsAttributesArgs{
    			Availability:        pulumi.String("string"),
    			EbsVolumeCount:      pulumi.Int(0),
    			EbsVolumeIops:       pulumi.Int(0),
    			EbsVolumeSize:       pulumi.Int(0),
    			EbsVolumeThroughput: pulumi.Int(0),
    			EbsVolumeType:       pulumi.String("string"),
    			FirstOnDemand:       pulumi.Int(0),
    			InstanceProfileArn:  pulumi.String("string"),
    			SpotBidPricePercent: pulumi.Int(0),
    			ZoneId:              pulumi.String("string"),
    		},
    		DriverNodeTypeId: pulumi.String("string"),
    		InstancePoolId:   pulumi.String("string"),
    		Libraries: databricks.JobNewClusterLibraryArray{
    			&databricks.JobNewClusterLibraryArgs{
    				Cran: &databricks.JobNewClusterLibraryCranArgs{
    					Package: pulumi.String("string"),
    					Repo:    pulumi.String("string"),
    				},
    				Egg: pulumi.String("string"),
    				Jar: pulumi.String("string"),
    				Maven: &databricks.JobNewClusterLibraryMavenArgs{
    					Coordinates: pulumi.String("string"),
    					Exclusions: pulumi.StringArray{
    						pulumi.String("string"),
    					},
    					Repo: pulumi.String("string"),
    				},
    				Pypi: &databricks.JobNewClusterLibraryPypiArgs{
    					Package: pulumi.String("string"),
    					Repo:    pulumi.String("string"),
    				},
    				Requirements: pulumi.String("string"),
    				Whl:          pulumi.String("string"),
    			},
    		},
    		NodeTypeId:     pulumi.String("string"),
    		NumWorkers:     pulumi.Int(0),
    		PolicyId:       pulumi.String("string"),
    		RuntimeEngine:  pulumi.String("string"),
    		SingleUserName: pulumi.String("string"),
    		SparkConf: pulumi.StringMap{
    			"string": pulumi.String("string"),
    		},
    		SparkEnvVars: pulumi.StringMap{
    			"string": pulumi.String("string"),
    		},
    		Autoscale: &databricks.JobNewClusterAutoscaleArgs{
    			MaxWorkers: pulumi.Int(0),
    			MinWorkers: pulumi.Int(0),
    		},
    		SshPublicKeys: pulumi.StringArray{
    			pulumi.String("string"),
    		},
    		WorkloadType: &databricks.JobNewClusterWorkloadTypeArgs{
    			Clients: &databricks.JobNewClusterWorkloadTypeClientsArgs{
    				Jobs:      pulumi.Bool(false),
    				Notebooks: pulumi.Bool(false),
    			},
    		},
    	},
    	NotificationSettings: &databricks.JobNotificationSettingsArgs{
    		NoAlertForCanceledRuns: pulumi.Bool(false),
    		NoAlertForSkippedRuns:  pulumi.Bool(false),
    	},
    	Parameters: databricks.JobParameterArray{
    		&databricks.JobParameterArgs{
    			Default: pulumi.String("string"),
    			Name:    pulumi.String("string"),
    		},
    	},
    	Queue: &databricks.JobQueueArgs{
    		Enabled: pulumi.Bool(false),
    	},
    	RunAs: &databricks.JobRunAsArgs{
    		ServicePrincipalName: pulumi.String("string"),
    		UserName:             pulumi.String("string"),
    	},
    	Schedule: &databricks.JobScheduleArgs{
    		QuartzCronExpression: pulumi.String("string"),
    		TimezoneId:           pulumi.String("string"),
    		PauseStatus:          pulumi.String("string"),
    	},
    	Tags: pulumi.StringMap{
    		"string": pulumi.String("string"),
    	},
    	Tasks: databricks.JobTaskArray{
    		&databricks.JobTaskArgs{
    			TaskKey: pulumi.String("string"),
    			NewCluster: &databricks.JobTaskNewClusterArgs{
    				SparkVersion:              pulumi.String("string"),
    				EnableLocalDiskEncryption: pulumi.Bool(false),
    				ClusterLogConf: &databricks.JobTaskNewClusterClusterLogConfArgs{
    					Dbfs: &databricks.JobTaskNewClusterClusterLogConfDbfsArgs{
    						Destination: pulumi.String("string"),
    					},
    					S3: &databricks.JobTaskNewClusterClusterLogConfS3Args{
    						Destination:      pulumi.String("string"),
    						CannedAcl:        pulumi.String("string"),
    						EnableEncryption: pulumi.Bool(false),
    						EncryptionType:   pulumi.String("string"),
    						Endpoint:         pulumi.String("string"),
    						KmsKey:           pulumi.String("string"),
    						Region:           pulumi.String("string"),
    					},
    				},
    				GcpAttributes: &databricks.JobTaskNewClusterGcpAttributesArgs{
    					Availability:            pulumi.String("string"),
    					BootDiskSize:            pulumi.Int(0),
    					GoogleServiceAccount:    pulumi.String("string"),
    					LocalSsdCount:           pulumi.Int(0),
    					UsePreemptibleExecutors: pulumi.Bool(false),
    					ZoneId:                  pulumi.String("string"),
    				},
    				ClusterId:        pulumi.String("string"),
    				IdempotencyToken: pulumi.String("string"),
    				ClusterMountInfos: databricks.JobTaskNewClusterClusterMountInfoArray{
    					&databricks.JobTaskNewClusterClusterMountInfoArgs{
    						LocalMountDirPath: pulumi.String("string"),
    						NetworkFilesystemInfo: &databricks.JobTaskNewClusterClusterMountInfoNetworkFilesystemInfoArgs{
    							ServerAddress: pulumi.String("string"),
    							MountOptions:  pulumi.String("string"),
    						},
    						RemoteMountDirPath: pulumi.String("string"),
    					},
    				},
    				ClusterName: pulumi.String("string"),
    				CustomTags: pulumi.StringMap{
    					"string": pulumi.String("string"),
    				},
    				DataSecurityMode: pulumi.String("string"),
    				DockerImage: &databricks.JobTaskNewClusterDockerImageArgs{
    					Url: pulumi.String("string"),
    					BasicAuth: &databricks.JobTaskNewClusterDockerImageBasicAuthArgs{
    						Password: pulumi.String("string"),
    						Username: pulumi.String("string"),
    					},
    				},
    				DriverInstancePoolId: pulumi.String("string"),
    				InitScripts: databricks.JobTaskNewClusterInitScriptArray{
    					&databricks.JobTaskNewClusterInitScriptArgs{
    						Abfss: &databricks.JobTaskNewClusterInitScriptAbfssArgs{
    							Destination: pulumi.String("string"),
    						},
    						File: &databricks.JobTaskNewClusterInitScriptFileArgs{
    							Destination: pulumi.String("string"),
    						},
    						Gcs: &databricks.JobTaskNewClusterInitScriptGcsArgs{
    							Destination: pulumi.String("string"),
    						},
    						S3: &databricks.JobTaskNewClusterInitScriptS3Args{
    							Destination:      pulumi.String("string"),
    							CannedAcl:        pulumi.String("string"),
    							EnableEncryption: pulumi.Bool(false),
    							EncryptionType:   pulumi.String("string"),
    							Endpoint:         pulumi.String("string"),
    							KmsKey:           pulumi.String("string"),
    							Region:           pulumi.String("string"),
    						},
    						Volumes: &databricks.JobTaskNewClusterInitScriptVolumesArgs{
    							Destination: pulumi.String("string"),
    						},
    						Workspace: &databricks.JobTaskNewClusterInitScriptWorkspaceArgs{
    							Destination: pulumi.String("string"),
    						},
    					},
    				},
    				EnableElasticDisk:        pulumi.Bool(false),
    				ApplyPolicyDefaultValues: pulumi.Bool(false),
    				AzureAttributes: &databricks.JobTaskNewClusterAzureAttributesArgs{
    					Availability:  pulumi.String("string"),
    					FirstOnDemand: pulumi.Int(0),
    					LogAnalyticsInfo: &databricks.JobTaskNewClusterAzureAttributesLogAnalyticsInfoArgs{
    						LogAnalyticsPrimaryKey:  pulumi.String("string"),
    						LogAnalyticsWorkspaceId: pulumi.String("string"),
    					},
    					SpotBidMaxPrice: pulumi.Float64(0),
    				},
    				AwsAttributes: &databricks.JobTaskNewClusterAwsAttributesArgs{
    					Availability:        pulumi.String("string"),
    					EbsVolumeCount:      pulumi.Int(0),
    					EbsVolumeIops:       pulumi.Int(0),
    					EbsVolumeSize:       pulumi.Int(0),
    					EbsVolumeThroughput: pulumi.Int(0),
    					EbsVolumeType:       pulumi.String("string"),
    					FirstOnDemand:       pulumi.Int(0),
    					InstanceProfileArn:  pulumi.String("string"),
    					SpotBidPricePercent: pulumi.Int(0),
    					ZoneId:              pulumi.String("string"),
    				},
    				DriverNodeTypeId: pulumi.String("string"),
    				InstancePoolId:   pulumi.String("string"),
    				Libraries: databricks.JobTaskNewClusterLibraryArray{
    					&databricks.JobTaskNewClusterLibraryArgs{
    						Cran: &databricks.JobTaskNewClusterLibraryCranArgs{
    							Package: pulumi.String("string"),
    							Repo:    pulumi.String("string"),
    						},
    						Egg: pulumi.String("string"),
    						Jar: pulumi.String("string"),
    						Maven: &databricks.JobTaskNewClusterLibraryMavenArgs{
    							Coordinates: pulumi.String("string"),
    							Exclusions: pulumi.StringArray{
    								pulumi.String("string"),
    							},
    							Repo: pulumi.String("string"),
    						},
    						Pypi: &databricks.JobTaskNewClusterLibraryPypiArgs{
    							Package: pulumi.String("string"),
    							Repo:    pulumi.String("string"),
    						},
    						Requirements: pulumi.String("string"),
    						Whl:          pulumi.String("string"),
    					},
    				},
    				NodeTypeId:     pulumi.String("string"),
    				NumWorkers:     pulumi.Int(0),
    				PolicyId:       pulumi.String("string"),
    				RuntimeEngine:  pulumi.String("string"),
    				SingleUserName: pulumi.String("string"),
    				SparkConf: pulumi.StringMap{
    					"string": pulumi.String("string"),
    				},
    				SparkEnvVars: pulumi.StringMap{
    					"string": pulumi.String("string"),
    				},
    				Autoscale: &databricks.JobTaskNewClusterAutoscaleArgs{
    					MaxWorkers: pulumi.Int(0),
    					MinWorkers: pulumi.Int(0),
    				},
    				SshPublicKeys: pulumi.StringArray{
    					pulumi.String("string"),
    				},
    				WorkloadType: &databricks.JobTaskNewClusterWorkloadTypeArgs{
    					Clients: &databricks.JobTaskNewClusterWorkloadTypeClientsArgs{
    						Jobs:      pulumi.Bool(false),
    						Notebooks: pulumi.Bool(false),
    					},
    				},
    			},
    			DbtTask: &databricks.JobTaskDbtTaskArgs{
    				Commands: pulumi.StringArray{
    					pulumi.String("string"),
    				},
    				Catalog:           pulumi.String("string"),
    				ProfilesDirectory: pulumi.String("string"),
    				ProjectDirectory:  pulumi.String("string"),
    				Schema:            pulumi.String("string"),
    				Source:            pulumi.String("string"),
    				WarehouseId:       pulumi.String("string"),
    			},
    			Description:             pulumi.String("string"),
    			DisableAutoOptimization: pulumi.Bool(false),
    			EmailNotifications: &databricks.JobTaskEmailNotificationsArgs{
    				NoAlertForSkippedRuns: pulumi.Bool(false),
    				OnDurationWarningThresholdExceededs: pulumi.StringArray{
    					pulumi.String("string"),
    				},
    				OnFailures: pulumi.StringArray{
    					pulumi.String("string"),
    				},
    				OnStarts: pulumi.StringArray{
    					pulumi.String("string"),
    				},
    				OnStreamingBacklogExceededs: pulumi.StringArray{
    					pulumi.String("string"),
    				},
    				OnSuccesses: pulumi.StringArray{
    					pulumi.String("string"),
    				},
    			},
    			EnvironmentKey:    pulumi.String("string"),
    			ExistingClusterId: pulumi.String("string"),
    			ForEachTask: &databricks.JobTaskForEachTaskArgs{
    				Inputs: pulumi.String("string"),
    				Task: &databricks.JobTaskForEachTaskTaskArgs{
    					TaskKey: pulumi.String("string"),
    					NotebookTask: &databricks.JobTaskForEachTaskTaskNotebookTaskArgs{
    						NotebookPath: pulumi.String("string"),
    						BaseParameters: pulumi.StringMap{
    							"string": pulumi.String("string"),
    						},
    						Source:      pulumi.String("string"),
    						WarehouseId: pulumi.String("string"),
    					},
    					WebhookNotifications: &databricks.JobTaskForEachTaskTaskWebhookNotificationsArgs{
    						OnDurationWarningThresholdExceededs: databricks.JobTaskForEachTaskTaskWebhookNotificationsOnDurationWarningThresholdExceededArray{
    							&databricks.JobTaskForEachTaskTaskWebhookNotificationsOnDurationWarningThresholdExceededArgs{
    								Id: pulumi.String("string"),
    							},
    						},
    						OnFailures: databricks.JobTaskForEachTaskTaskWebhookNotificationsOnFailureArray{
    							&databricks.JobTaskForEachTaskTaskWebhookNotificationsOnFailureArgs{
    								Id: pulumi.String("string"),
    							},
    						},
    						OnStarts: databricks.JobTaskForEachTaskTaskWebhookNotificationsOnStartArray{
    							&databricks.JobTaskForEachTaskTaskWebhookNotificationsOnStartArgs{
    								Id: pulumi.String("string"),
    							},
    						},
    						OnStreamingBacklogExceededs: databricks.JobTaskForEachTaskTaskWebhookNotificationsOnStreamingBacklogExceededArray{
    							&databricks.JobTaskForEachTaskTaskWebhookNotificationsOnStreamingBacklogExceededArgs{
    								Id: pulumi.String("string"),
    							},
    						},
    						OnSuccesses: databricks.JobTaskForEachTaskTaskWebhookNotificationsOnSuccessArray{
    							&databricks.JobTaskForEachTaskTaskWebhookNotificationsOnSuccessArgs{
    								Id: pulumi.String("string"),
    							},
    						},
    					},
    					NewCluster: &databricks.JobTaskForEachTaskTaskNewClusterArgs{
    						SparkVersion:              pulumi.String("string"),
    						EnableLocalDiskEncryption: pulumi.Bool(false),
    						ClusterLogConf: &databricks.JobTaskForEachTaskTaskNewClusterClusterLogConfArgs{
    							Dbfs: &databricks.JobTaskForEachTaskTaskNewClusterClusterLogConfDbfsArgs{
    								Destination: pulumi.String("string"),
    							},
    							S3: &databricks.JobTaskForEachTaskTaskNewClusterClusterLogConfS3Args{
    								Destination:      pulumi.String("string"),
    								CannedAcl:        pulumi.String("string"),
    								EnableEncryption: pulumi.Bool(false),
    								EncryptionType:   pulumi.String("string"),
    								Endpoint:         pulumi.String("string"),
    								KmsKey:           pulumi.String("string"),
    								Region:           pulumi.String("string"),
    							},
    						},
    						GcpAttributes: &databricks.JobTaskForEachTaskTaskNewClusterGcpAttributesArgs{
    							Availability:            pulumi.String("string"),
    							BootDiskSize:            pulumi.Int(0),
    							GoogleServiceAccount:    pulumi.String("string"),
    							LocalSsdCount:           pulumi.Int(0),
    							UsePreemptibleExecutors: pulumi.Bool(false),
    							ZoneId:                  pulumi.String("string"),
    						},
    						ClusterId:        pulumi.String("string"),
    						IdempotencyToken: pulumi.String("string"),
    						ClusterMountInfos: databricks.JobTaskForEachTaskTaskNewClusterClusterMountInfoArray{
    							&databricks.JobTaskForEachTaskTaskNewClusterClusterMountInfoArgs{
    								LocalMountDirPath: pulumi.String("string"),
    								NetworkFilesystemInfo: &databricks.JobTaskForEachTaskTaskNewClusterClusterMountInfoNetworkFilesystemInfoArgs{
    									ServerAddress: pulumi.String("string"),
    									MountOptions:  pulumi.String("string"),
    								},
    								RemoteMountDirPath: pulumi.String("string"),
    							},
    						},
    						ClusterName: pulumi.String("string"),
    						CustomTags: pulumi.StringMap{
    							"string": pulumi.String("string"),
    						},
    						DataSecurityMode: pulumi.String("string"),
    						DockerImage: &databricks.JobTaskForEachTaskTaskNewClusterDockerImageArgs{
    							Url: pulumi.String("string"),
    							BasicAuth: &databricks.JobTaskForEachTaskTaskNewClusterDockerImageBasicAuthArgs{
    								Password: pulumi.String("string"),
    								Username: pulumi.String("string"),
    							},
    						},
    						DriverInstancePoolId: pulumi.String("string"),
    						InitScripts: databricks.JobTaskForEachTaskTaskNewClusterInitScriptArray{
    							&databricks.JobTaskForEachTaskTaskNewClusterInitScriptArgs{
    								Abfss: &databricks.JobTaskForEachTaskTaskNewClusterInitScriptAbfssArgs{
    									Destination: pulumi.String("string"),
    								},
    								File: &databricks.JobTaskForEachTaskTaskNewClusterInitScriptFileArgs{
    									Destination: pulumi.String("string"),
    								},
    								Gcs: &databricks.JobTaskForEachTaskTaskNewClusterInitScriptGcsArgs{
    									Destination: pulumi.String("string"),
    								},
    								S3: &databricks.JobTaskForEachTaskTaskNewClusterInitScriptS3Args{
    									Destination:      pulumi.String("string"),
    									CannedAcl:        pulumi.String("string"),
    									EnableEncryption: pulumi.Bool(false),
    									EncryptionType:   pulumi.String("string"),
    									Endpoint:         pulumi.String("string"),
    									KmsKey:           pulumi.String("string"),
    									Region:           pulumi.String("string"),
    								},
    								Volumes: &databricks.JobTaskForEachTaskTaskNewClusterInitScriptVolumesArgs{
    									Destination: pulumi.String("string"),
    								},
    								Workspace: &databricks.JobTaskForEachTaskTaskNewClusterInitScriptWorkspaceArgs{
    									Destination: pulumi.String("string"),
    								},
    							},
    						},
    						EnableElasticDisk:        pulumi.Bool(false),
    						ApplyPolicyDefaultValues: pulumi.Bool(false),
    						AzureAttributes: &databricks.JobTaskForEachTaskTaskNewClusterAzureAttributesArgs{
    							Availability:  pulumi.String("string"),
    							FirstOnDemand: pulumi.Int(0),
    							LogAnalyticsInfo: &databricks.JobTaskForEachTaskTaskNewClusterAzureAttributesLogAnalyticsInfoArgs{
    								LogAnalyticsPrimaryKey:  pulumi.String("string"),
    								LogAnalyticsWorkspaceId: pulumi.String("string"),
    							},
    							SpotBidMaxPrice: pulumi.Float64(0),
    						},
    						AwsAttributes: &databricks.JobTaskForEachTaskTaskNewClusterAwsAttributesArgs{
    							Availability:        pulumi.String("string"),
    							EbsVolumeCount:      pulumi.Int(0),
    							EbsVolumeIops:       pulumi.Int(0),
    							EbsVolumeSize:       pulumi.Int(0),
    							EbsVolumeThroughput: pulumi.Int(0),
    							EbsVolumeType:       pulumi.String("string"),
    							FirstOnDemand:       pulumi.Int(0),
    							InstanceProfileArn:  pulumi.String("string"),
    							SpotBidPricePercent: pulumi.Int(0),
    							ZoneId:              pulumi.String("string"),
    						},
    						DriverNodeTypeId: pulumi.String("string"),
    						InstancePoolId:   pulumi.String("string"),
    						Libraries: databricks.JobTaskForEachTaskTaskNewClusterLibraryArray{
    							&databricks.JobTaskForEachTaskTaskNewClusterLibraryArgs{
    								Cran: &databricks.JobTaskForEachTaskTaskNewClusterLibraryCranArgs{
    									Package: pulumi.String("string"),
    									Repo:    pulumi.String("string"),
    								},
    								Egg: pulumi.String("string"),
    								Jar: pulumi.String("string"),
    								Maven: &databricks.JobTaskForEachTaskTaskNewClusterLibraryMavenArgs{
    									Coordinates: pulumi.String("string"),
    									Exclusions: pulumi.StringArray{
    										pulumi.String("string"),
    									},
    									Repo: pulumi.String("string"),
    								},
    								Pypi: &databricks.JobTaskForEachTaskTaskNewClusterLibraryPypiArgs{
    									Package: pulumi.String("string"),
    									Repo:    pulumi.String("string"),
    								},
    								Requirements: pulumi.String("string"),
    								Whl:          pulumi.String("string"),
    							},
    						},
    						NodeTypeId:     pulumi.String("string"),
    						NumWorkers:     pulumi.Int(0),
    						PolicyId:       pulumi.String("string"),
    						RuntimeEngine:  pulumi.String("string"),
    						SingleUserName: pulumi.String("string"),
    						SparkConf: pulumi.StringMap{
    							"string": pulumi.String("string"),
    						},
    						SparkEnvVars: pulumi.StringMap{
    							"string": pulumi.String("string"),
    						},
    						Autoscale: &databricks.JobTaskForEachTaskTaskNewClusterAutoscaleArgs{
    							MaxWorkers: pulumi.Int(0),
    							MinWorkers: pulumi.Int(0),
    						},
    						SshPublicKeys: pulumi.StringArray{
    							pulumi.String("string"),
    						},
    						WorkloadType: &databricks.JobTaskForEachTaskTaskNewClusterWorkloadTypeArgs{
    							Clients: &databricks.JobTaskForEachTaskTaskNewClusterWorkloadTypeClientsArgs{
    								Jobs:      pulumi.Bool(false),
    								Notebooks: pulumi.Bool(false),
    							},
    						},
    					},
    					DisableAutoOptimization: pulumi.Bool(false),
    					EmailNotifications: &databricks.JobTaskForEachTaskTaskEmailNotificationsArgs{
    						NoAlertForSkippedRuns: pulumi.Bool(false),
    						OnDurationWarningThresholdExceededs: pulumi.StringArray{
    							pulumi.String("string"),
    						},
    						OnFailures: pulumi.StringArray{
    							pulumi.String("string"),
    						},
    						OnStarts: pulumi.StringArray{
    							pulumi.String("string"),
    						},
    						OnStreamingBacklogExceededs: pulumi.StringArray{
    							pulumi.String("string"),
    						},
    						OnSuccesses: pulumi.StringArray{
    							pulumi.String("string"),
    						},
    					},
    					EnvironmentKey:    pulumi.String("string"),
    					ExistingClusterId: pulumi.String("string"),
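    					// Health rules: notify when a metric (e.g. RUN_DURATION_SECONDS) crosses Value.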
    					Health: &databricks.JobTaskForEachTaskTaskHealthArgs{
    						Rules: databricks.JobTaskForEachTaskTaskHealthRuleArray{
    							&databricks.JobTaskForEachTaskTaskHealthRuleArgs{
    								Metric: pulumi.String("string"),
    								Op:     pulumi.String("string"),
    								Value:  pulumi.Int(0),
    							},
    						},
    					},
    					JobClusterKey: pulumi.String("string"),
    					Libraries: databricks.JobTaskForEachTaskTaskLibraryArray{
    						&databricks.JobTaskForEachTaskTaskLibraryArgs{
    							Cran: &databricks.JobTaskForEachTaskTaskLibraryCranArgs{
    								Package: pulumi.String("string"),
    								Repo:    pulumi.String("string"),
    							},
    							Egg: pulumi.String("string"),
    							Jar: pulumi.String("string"),
    							Maven: &databricks.JobTaskForEachTaskTaskLibraryMavenArgs{
    								Coordinates: pulumi.String("string"),
    								Exclusions: pulumi.StringArray{
    									pulumi.String("string"),
    								},
    								Repo: pulumi.String("string"),
    							},
    							Pypi: &databricks.JobTaskForEachTaskTaskLibraryPypiArgs{
    								Package: pulumi.String("string"),
    								Repo:    pulumi.String("string"),
    							},
    							Requirements: pulumi.String("string"),
    							Whl:          pulumi.String("string"),
    						},
    					},
    					MaxRetries:             pulumi.Int(0),
    					MinRetryIntervalMillis: pulumi.Int(0),
    					Description:            pulumi.String("string"),
    					DependsOns: databricks.JobTaskForEachTaskTaskDependsOnArray{
    						&databricks.JobTaskForEachTaskTaskDependsOnArgs{
    							TaskKey: pulumi.String("string"),
    							Outcome: pulumi.String("string"),
    						},
    					},
    					SparkPythonTask: &databricks.JobTaskForEachTaskTaskSparkPythonTaskArgs{
    						PythonFile: pulumi.String("string"),
    						Parameters: pulumi.StringArray{
    							pulumi.String("string"),
    						},
    						Source: pulumi.String("string"),
    					},
    					PipelineTask: &databricks.JobTaskForEachTaskTaskPipelineTaskArgs{
    						PipelineId:  pulumi.String("string"),
    						FullRefresh: pulumi.Bool(false),
    					},
    					PythonWheelTask: &databricks.JobTaskForEachTaskTaskPythonWheelTaskArgs{
    						EntryPoint: pulumi.String("string"),
    						NamedParameters: pulumi.StringMap{
    							"string": pulumi.String("string"),
    						},
    						PackageName: pulumi.String("string"),
    						Parameters: pulumi.StringArray{
    							pulumi.String("string"),
    						},
    					},
    					RetryOnTimeout: pulumi.Bool(false),
    					RunIf:          pulumi.String("string"),
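    					// Trigger another job by ID, passing parameters per task type.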
    					RunJobTask: &databricks.JobTaskForEachTaskTaskRunJobTaskArgs{
    						JobId: pulumi.Int(0),
    						DbtCommands: pulumi.StringArray{
    							pulumi.String("string"),
    						},
    						JarParams: pulumi.StringArray{
    							pulumi.String("string"),
    						},
    						JobParameters: pulumi.StringMap{
    							"string": pulumi.String("string"),
    						},
    						NotebookParams: pulumi.StringMap{
    							"string": pulumi.String("string"),
    						},
    						PipelineParams: &databricks.JobTaskForEachTaskTaskRunJobTaskPipelineParamsArgs{
    							FullRefresh: pulumi.Bool(false),
    						},
    						PythonNamedParams: pulumi.StringMap{
    							"string": pulumi.String("string"),
    						},
    						PythonParams: pulumi.StringArray{
    							pulumi.String("string"),
    						},
    						SparkSubmitParams: pulumi.StringArray{
    							pulumi.String("string"),
    						},
    						SqlParams: pulumi.StringMap{
    							"string": pulumi.String("string"),
    						},
    					},
    					SparkJarTask: &databricks.JobTaskForEachTaskTaskSparkJarTaskArgs{
    						JarUri:        pulumi.String("string"),
    						MainClassName: pulumi.String("string"),
    						Parameters: pulumi.StringArray{
    							pulumi.String("string"),
    						},
    					},
    					NotificationSettings: &databricks.JobTaskForEachTaskTaskNotificationSettingsArgs{
    						AlertOnLastAttempt:     pulumi.Bool(false),
    						NoAlertForCanceledRuns: pulumi.Bool(false),
    						NoAlertForSkippedRuns:  pulumi.Bool(false),
    					},
    					SparkSubmitTask: &databricks.JobTaskForEachTaskTaskSparkSubmitTaskArgs{
    						Parameters: pulumi.StringArray{
    							pulumi.String("string"),
    						},
    					},
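    					// SQL task: set one of Alert, Dashboard, File, or Query, plus a WarehouseId.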
    					SqlTask: &databricks.JobTaskForEachTaskTaskSqlTaskArgs{
    						WarehouseId: pulumi.String("string"),
    						Alert: &databricks.JobTaskForEachTaskTaskSqlTaskAlertArgs{
    							AlertId: pulumi.String("string"),
    							Subscriptions: databricks.JobTaskForEachTaskTaskSqlTaskAlertSubscriptionArray{
    								&databricks.JobTaskForEachTaskTaskSqlTaskAlertSubscriptionArgs{
    									DestinationId: pulumi.String("string"),
    									UserName:      pulumi.String("string"),
    								},
    							},
    							PauseSubscriptions: pulumi.Bool(false),
    						},
    						Dashboard: &databricks.JobTaskForEachTaskTaskSqlTaskDashboardArgs{
    							DashboardId:        pulumi.String("string"),
    							CustomSubject:      pulumi.String("string"),
    							PauseSubscriptions: pulumi.Bool(false),
    							Subscriptions: databricks.JobTaskForEachTaskTaskSqlTaskDashboardSubscriptionArray{
    								&databricks.JobTaskForEachTaskTaskSqlTaskDashboardSubscriptionArgs{
    									DestinationId: pulumi.String("string"),
    									UserName:      pulumi.String("string"),
    								},
    							},
    						},
    						File: &databricks.JobTaskForEachTaskTaskSqlTaskFileArgs{
    							Path:   pulumi.String("string"),
    							Source: pulumi.String("string"),
    						},
    						Parameters: pulumi.StringMap{
    							"string": pulumi.String("string"),
    						},
    						Query: &databricks.JobTaskForEachTaskTaskSqlTaskQueryArgs{
    							QueryId: pulumi.String("string"),
    						},
    					},
    					DbtTask: &databricks.JobTaskForEachTaskTaskDbtTaskArgs{
    						Commands: pulumi.StringArray{
    							pulumi.String("string"),
    						},
    						Catalog:           pulumi.String("string"),
    						ProfilesDirectory: pulumi.String("string"),
    						ProjectDirectory:  pulumi.String("string"),
    						Schema:            pulumi.String("string"),
    						Source:            pulumi.String("string"),
    						WarehouseId:       pulumi.String("string"),
    					},
    					TimeoutSeconds: pulumi.Int(0),
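    					// Compares Left and Right with Op; dependents branch on it via DependsOns.Outcome.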
    					ConditionTask: &databricks.JobTaskForEachTaskTaskConditionTaskArgs{
    						Left:  pulumi.String("string"),
    						Op:    pulumi.String("string"),
    						Right: pulumi.String("string"),
    					},
    				},
    				Concurrency: pulumi.Int(0),
    			},
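    			// Health thresholds for the outer task itself.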
    			Health: &databricks.JobTaskHealthArgs{
    				Rules: databricks.JobTaskHealthRuleArray{
    					&databricks.JobTaskHealthRuleArgs{
    						Metric: pulumi.String("string"),
    						Op:     pulumi.String("string"),
    						Value:  pulumi.Int(0),
    					},
    				},
    			},
    			JobClusterKey: pulumi.String("string"),
    			Libraries: databricks.JobTaskLibraryArray{
    				&databricks.JobTaskLibraryArgs{
    					Cran: &databricks.JobTaskLibraryCranArgs{
    						Package: pulumi.String("string"),
    						Repo:    pulumi.String("string"),
    					},
    					Egg: pulumi.String("string"),
    					Jar: pulumi.String("string"),
    					Maven: &databricks.JobTaskLibraryMavenArgs{
    						Coordinates: pulumi.String("string"),
    						Exclusions: pulumi.StringArray{
    							pulumi.String("string"),
    						},
    						Repo: pulumi.String("string"),
    					},
    					Pypi: &databricks.JobTaskLibraryPypiArgs{
    						Package: pulumi.String("string"),
    						Repo:    pulumi.String("string"),
    					},
    					Requirements: pulumi.String("string"),
    					Whl:          pulumi.String("string"),
    				},
    			},
    			MaxRetries: pulumi.Int(0),
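    			// Webhooks reference notification destinations by Id.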
    			WebhookNotifications: &databricks.JobTaskWebhookNotificationsArgs{
    				OnDurationWarningThresholdExceededs: databricks.JobTaskWebhookNotificationsOnDurationWarningThresholdExceededArray{
    					&databricks.JobTaskWebhookNotificationsOnDurationWarningThresholdExceededArgs{
    						Id: pulumi.String("string"),
    					},
    				},
    				OnFailures: databricks.JobTaskWebhookNotificationsOnFailureArray{
    					&databricks.JobTaskWebhookNotificationsOnFailureArgs{
    						Id: pulumi.String("string"),
    					},
    				},
    				OnStarts: databricks.JobTaskWebhookNotificationsOnStartArray{
    					&databricks.JobTaskWebhookNotificationsOnStartArgs{
    						Id: pulumi.String("string"),
    					},
    				},
    				OnStreamingBacklogExceededs: databricks.JobTaskWebhookNotificationsOnStreamingBacklogExceededArray{
    					&databricks.JobTaskWebhookNotificationsOnStreamingBacklogExceededArgs{
    						Id: pulumi.String("string"),
    					},
    				},
    				OnSuccesses: databricks.JobTaskWebhookNotificationsOnSuccessArray{
    					&databricks.JobTaskWebhookNotificationsOnSuccessArgs{
    						Id: pulumi.String("string"),
    					},
    				},
    			},
    			DependsOns: databricks.JobTaskDependsOnArray{
    				&databricks.JobTaskDependsOnArgs{
    					TaskKey: pulumi.String("string"),
    					Outcome: pulumi.String("string"),
    				},
    			},
    			RetryOnTimeout: pulumi.Bool(false),
    			NotificationSettings: &databricks.JobTaskNotificationSettingsArgs{
    				AlertOnLastAttempt:     pulumi.Bool(false),
    				NoAlertForCanceledRuns: pulumi.Bool(false),
    				NoAlertForSkippedRuns:  pulumi.Bool(false),
    			},
    			PipelineTask: &databricks.JobTaskPipelineTaskArgs{
    				PipelineId:  pulumi.String("string"),
    				FullRefresh: pulumi.Bool(false),
    			},
    			PythonWheelTask: &databricks.JobTaskPythonWheelTaskArgs{
    				EntryPoint: pulumi.String("string"),
    				NamedParameters: pulumi.StringMap{
    					"string": pulumi.String("string"),
    				},
    				PackageName: pulumi.String("string"),
    				Parameters: pulumi.StringArray{
    					pulumi.String("string"),
    				},
    			},
    			NotebookTask: &databricks.JobTaskNotebookTaskArgs{
    				NotebookPath: pulumi.String("string"),
    				BaseParameters: pulumi.StringMap{
    					"string": pulumi.String("string"),
    				},
    				Source:      pulumi.String("string"),
    				WarehouseId: pulumi.String("string"),
    			},
    			RunIf: pulumi.String("string"),
    			RunJobTask: &databricks.JobTaskRunJobTaskArgs{
    				JobId: pulumi.Int(0),
    				DbtCommands: pulumi.StringArray{
    					pulumi.String("string"),
    				},
    				JarParams: pulumi.StringArray{
    					pulumi.String("string"),
    				},
    				JobParameters: pulumi.StringMap{
    					"string": pulumi.String("string"),
    				},
    				NotebookParams: pulumi.StringMap{
    					"string": pulumi.String("string"),
    				},
    				PipelineParams: &databricks.JobTaskRunJobTaskPipelineParamsArgs{
    					FullRefresh: pulumi.Bool(false),
    				},
    				PythonNamedParams: pulumi.StringMap{
    					"string": pulumi.String("string"),
    				},
    				PythonParams: pulumi.StringArray{
    					pulumi.String("string"),
    				},
    				SparkSubmitParams: pulumi.StringArray{
    					pulumi.String("string"),
    				},
    				SqlParams: pulumi.StringMap{
    					"string": pulumi.String("string"),
    				},
    			},
    			SparkJarTask: &databricks.JobTaskSparkJarTaskArgs{
    				JarUri:        pulumi.String("string"),
    				MainClassName: pulumi.String("string"),
    				Parameters: pulumi.StringArray{
    					pulumi.String("string"),
    				},
    			},
    			SparkPythonTask: &databricks.JobTaskSparkPythonTaskArgs{
    				PythonFile: pulumi.String("string"),
    				Parameters: pulumi.StringArray{
    					pulumi.String("string"),
    				},
    				Source: pulumi.String("string"),
    			},
    			SparkSubmitTask: &databricks.JobTaskSparkSubmitTaskArgs{
    				Parameters: pulumi.StringArray{
    					pulumi.String("string"),
    				},
    			},
    			SqlTask: &databricks.JobTaskSqlTaskArgs{
    				WarehouseId: pulumi.String("string"),
    				Alert: &databricks.JobTaskSqlTaskAlertArgs{
    					AlertId: pulumi.String("string"),
    					Subscriptions: databricks.JobTaskSqlTaskAlertSubscriptionArray{
    						&databricks.JobTaskSqlTaskAlertSubscriptionArgs{
    							DestinationId: pulumi.String("string"),
    							UserName:      pulumi.String("string"),
    						},
    					},
    					PauseSubscriptions: pulumi.Bool(false),
    				},
    				Dashboard: &databricks.JobTaskSqlTaskDashboardArgs{
    					DashboardId:        pulumi.String("string"),
    					CustomSubject:      pulumi.String("string"),
    					PauseSubscriptions: pulumi.Bool(false),
    					Subscriptions: databricks.JobTaskSqlTaskDashboardSubscriptionArray{
    						&databricks.JobTaskSqlTaskDashboardSubscriptionArgs{
    							DestinationId: pulumi.String("string"),
    							UserName:      pulumi.String("string"),
    						},
    					},
    				},
    				File: &databricks.JobTaskSqlTaskFileArgs{
    					Path:   pulumi.String("string"),
    					Source: pulumi.String("string"),
    				},
    				Parameters: pulumi.StringMap{
    					"string": pulumi.String("string"),
    				},
    				Query: &databricks.JobTaskSqlTaskQueryArgs{
    					QueryId: pulumi.String("string"),
    				},
    			},
    			ConditionTask: &databricks.JobTaskConditionTaskArgs{
    				Left:  pulumi.String("string"),
    				Op:    pulumi.String("string"),
    				Right: pulumi.String("string"),
    			},
    			TimeoutSeconds:         pulumi.Int(0),
    			MinRetryIntervalMillis: pulumi.Int(0),
    		},
    	},
    	TimeoutSeconds: pulumi.Int(0),
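    	// Event-based alternative to Schedule; configure one trigger type.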
    	Trigger: &databricks.JobTriggerArgs{
    		FileArrival: &databricks.JobTriggerFileArrivalArgs{
    			Url:                           pulumi.String("string"),
    			MinTimeBetweenTriggersSeconds: pulumi.Int(0),
    			WaitAfterLastChangeSeconds:    pulumi.Int(0),
    		},
    		PauseStatus: pulumi.String("string"),
    		Periodic: &databricks.JobTriggerPeriodicArgs{
    			Interval: pulumi.Int(0),
    			Unit:     pulumi.String("string"),
    		},
    		Table: &databricks.JobTriggerTableArgs{
    			Condition:                     pulumi.String("string"),
    			MinTimeBetweenTriggersSeconds: pulumi.Int(0),
    			TableNames: pulumi.StringArray{
    				pulumi.String("string"),
    			},
    			WaitAfterLastChangeSeconds: pulumi.Int(0),
    		},
    		TableUpdate: &databricks.JobTriggerTableUpdateArgs{
    			TableNames: pulumi.StringArray{
    				pulumi.String("string"),
    			},
    			Condition:                     pulumi.String("string"),
    			MinTimeBetweenTriggersSeconds: pulumi.Int(0),
    			WaitAfterLastChangeSeconds:    pulumi.Int(0),
    		},
    	},
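    	// Job-level webhook notifications; each entry is a notification destination Id.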
    	WebhookNotifications: &databricks.JobWebhookNotificationsArgs{
    		OnDurationWarningThresholdExceededs: databricks.JobWebhookNotificationsOnDurationWarningThresholdExceededArray{
    			&databricks.JobWebhookNotificationsOnDurationWarningThresholdExceededArgs{
    				Id: pulumi.String("string"),
    			},
    		},
    		OnFailures: databricks.JobWebhookNotificationsOnFailureArray{
    			&databricks.JobWebhookNotificationsOnFailureArgs{
    				Id: pulumi.String("string"),
    			},
    		},
    		OnStarts: databricks.JobWebhookNotificationsOnStartArray{
    			&databricks.JobWebhookNotificationsOnStartArgs{
    				Id: pulumi.String("string"),
    			},
    		},
    		OnStreamingBacklogExceededs: databricks.JobWebhookNotificationsOnStreamingBacklogExceededArray{
    			&databricks.JobWebhookNotificationsOnStreamingBacklogExceededArgs{
    				Id: pulumi.String("string"),
    			},
    		},
    		OnSuccesses: databricks.JobWebhookNotificationsOnSuccessArray{
    			&databricks.JobWebhookNotificationsOnSuccessArgs{
    				Id: pulumi.String("string"),
    			},
    		},
    	},
    })
    
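    The exhaustive listings above and below enumerate every supported input with placeholder values. As a more practical point of reference, the following minimal sketch in Go shows a single notebook task on a small ephemeral cluster, run nightly; the notebook path, Spark version, node type, and cron expression are illustrative assumptions, not recommendations.

    package main

    import (
    	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )

    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		// A single-task job: run one notebook nightly on a fresh two-worker cluster.
    		_, err := databricks.NewJob(ctx, "nightly", &databricks.JobArgs{
    			Name: pulumi.String("nightly-etl"),
    			Tasks: databricks.JobTaskArray{
    				&databricks.JobTaskArgs{
    					TaskKey: pulumi.String("ingest"),
    					NotebookTask: &databricks.JobTaskNotebookTaskArgs{
    						NotebookPath: pulumi.String("/Shared/ingest"), // assumed workspace path
    					},
    					NewCluster: &databricks.JobTaskNewClusterArgs{
    						NumWorkers:   pulumi.Int(2),
    						SparkVersion: pulumi.String("14.3.x-scala2.12"), // assumed runtime
    						NodeTypeId:   pulumi.String("i3.xlarge"),        // assumed node type
    					},
    				},
    			},
    			Schedule: &databricks.JobScheduleArgs{
    				QuartzCronExpression: pulumi.String("0 0 2 * * ?"), // 02:00 daily
    				TimezoneId:           pulumi.String("UTC"),
    			},
    		})
    		return err
    	})
    }

    The builder-style listing that follows repeats the exhaustive placeholder example in Java.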
    var jobResource = new Job("jobResource", JobArgs.builder()
        .continuous(JobContinuousArgs.builder()
            .pauseStatus("string")
            .build())
        .controlRunState(false)
        .deployment(JobDeploymentArgs.builder()
            .kind("string")
            .metadataFilePath("string")
            .build())
        .description("string")
        .editMode("string")
        .emailNotifications(JobEmailNotificationsArgs.builder()
            .noAlertForSkippedRuns(false)
            .onDurationWarningThresholdExceededs("string")
            .onFailures("string")
            .onStarts("string")
            .onStreamingBacklogExceededs("string")
            .onSuccesses("string")
            .build())
        .environments(JobEnvironmentArgs.builder()
            .environmentKey("string")
            .spec(JobEnvironmentSpecArgs.builder()
                .client("string")
                .dependencies("string")
                .build())
            .build())
        .existingClusterId("string")
        .format("string")
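        // Remote Git source for notebook and job definitions.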
        .gitSource(JobGitSourceArgs.builder()
            .url("string")
            .branch("string")
            .commit("string")
            .gitSnapshot(JobGitSourceGitSnapshotArgs.builder()
                .usedCommit("string")
                .build())
            .jobSource(JobGitSourceJobSourceArgs.builder()
                .importFromGitBranch("string")
                .jobConfigPath("string")
                .dirtyState("string")
                .build())
            .provider("string")
            .tag("string")
            .build())
        .health(JobHealthArgs.builder()
            .rules(JobHealthRuleArgs.builder()
                .metric("string")
                .op("string")
                .value(0)
                .build())
            .build())
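        // Shared clusters that tasks attach to via jobClusterKey.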
        .jobClusters(JobJobClusterArgs.builder()
            .jobClusterKey("string")
            .newCluster(JobJobClusterNewClusterArgs.builder()
                .sparkVersion("string")
                .enableLocalDiskEncryption(false)
                .clusterLogConf(JobJobClusterNewClusterClusterLogConfArgs.builder()
                    .dbfs(JobJobClusterNewClusterClusterLogConfDbfsArgs.builder()
                        .destination("string")
                        .build())
                    .s3(JobJobClusterNewClusterClusterLogConfS3Args.builder()
                        .destination("string")
                        .cannedAcl("string")
                        .enableEncryption(false)
                        .encryptionType("string")
                        .endpoint("string")
                        .kmsKey("string")
                        .region("string")
                        .build())
                    .build())
                .gcpAttributes(JobJobClusterNewClusterGcpAttributesArgs.builder()
                    .availability("string")
                    .bootDiskSize(0)
                    .googleServiceAccount("string")
                    .localSsdCount(0)
                    .usePreemptibleExecutors(false)
                    .zoneId("string")
                    .build())
                .clusterId("string")
                .idempotencyToken("string")
                .clusterMountInfos(JobJobClusterNewClusterClusterMountInfoArgs.builder()
                    .localMountDirPath("string")
                    .networkFilesystemInfo(JobJobClusterNewClusterClusterMountInfoNetworkFilesystemInfoArgs.builder()
                        .serverAddress("string")
                        .mountOptions("string")
                        .build())
                    .remoteMountDirPath("string")
                    .build())
                .clusterName("string")
                .customTags(Map.of("string", "string"))
                .dataSecurityMode("string")
                .dockerImage(JobJobClusterNewClusterDockerImageArgs.builder()
                    .url("string")
                    .basicAuth(JobJobClusterNewClusterDockerImageBasicAuthArgs.builder()
                        .password("string")
                        .username("string")
                        .build())
                    .build())
                .driverInstancePoolId("string")
                .initScripts(JobJobClusterNewClusterInitScriptArgs.builder()
                    .abfss(JobJobClusterNewClusterInitScriptAbfssArgs.builder()
                        .destination("string")
                        .build())
                    .file(JobJobClusterNewClusterInitScriptFileArgs.builder()
                        .destination("string")
                        .build())
                    .gcs(JobJobClusterNewClusterInitScriptGcsArgs.builder()
                        .destination("string")
                        .build())
                    .s3(JobJobClusterNewClusterInitScriptS3Args.builder()
                        .destination("string")
                        .cannedAcl("string")
                        .enableEncryption(false)
                        .encryptionType("string")
                        .endpoint("string")
                        .kmsKey("string")
                        .region("string")
                        .build())
                    .volumes(JobJobClusterNewClusterInitScriptVolumesArgs.builder()
                        .destination("string")
                        .build())
                    .workspace(JobJobClusterNewClusterInitScriptWorkspaceArgs.builder()
                        .destination("string")
                        .build())
                    .build())
                .enableElasticDisk(false)
                .applyPolicyDefaultValues(false)
                .azureAttributes(JobJobClusterNewClusterAzureAttributesArgs.builder()
                    .availability("string")
                    .firstOnDemand(0)
                    .logAnalyticsInfo(JobJobClusterNewClusterAzureAttributesLogAnalyticsInfoArgs.builder()
                        .logAnalyticsPrimaryKey("string")
                        .logAnalyticsWorkspaceId("string")
                        .build())
                    .spotBidMaxPrice(0)
                    .build())
                .awsAttributes(JobJobClusterNewClusterAwsAttributesArgs.builder()
                    .availability("string")
                    .ebsVolumeCount(0)
                    .ebsVolumeIops(0)
                    .ebsVolumeSize(0)
                    .ebsVolumeThroughput(0)
                    .ebsVolumeType("string")
                    .firstOnDemand(0)
                    .instanceProfileArn("string")
                    .spotBidPricePercent(0)
                    .zoneId("string")
                    .build())
                .driverNodeTypeId("string")
                .instancePoolId("string")
                .libraries(JobJobClusterNewClusterLibraryArgs.builder()
                    .cran(JobJobClusterNewClusterLibraryCranArgs.builder()
                        .package_("string")
                        .repo("string")
                        .build())
                    .egg("string")
                    .jar("string")
                    .maven(JobJobClusterNewClusterLibraryMavenArgs.builder()
                        .coordinates("string")
                        .exclusions("string")
                        .repo("string")
                        .build())
                    .pypi(JobJobClusterNewClusterLibraryPypiArgs.builder()
                        .package_("string")
                        .repo("string")
                        .build())
                    .requirements("string")
                    .whl("string")
                    .build())
                .nodeTypeId("string")
                .numWorkers(0)
                .policyId("string")
                .runtimeEngine("string")
                .singleUserName("string")
                .sparkConf(Map.of("string", "string"))
                .sparkEnvVars(Map.of("string", "string"))
                .autoscale(JobJobClusterNewClusterAutoscaleArgs.builder()
                    .maxWorkers(0)
                    .minWorkers(0)
                    .build())
                .sshPublicKeys("string")
                .workloadType(JobJobClusterNewClusterWorkloadTypeArgs.builder()
                    .clients(JobJobClusterNewClusterWorkloadTypeClientsArgs.builder()
                        .jobs(false)
                        .notebooks(false)
                        .build())
                    .build())
                .build())
            .build())
        .libraries(JobLibraryArgs.builder()
            .cran(JobLibraryCranArgs.builder()
                .package_("string")
                .repo("string")
                .build())
            .egg("string")
            .jar("string")
            .maven(JobLibraryMavenArgs.builder()
                .coordinates("string")
                .exclusions("string")
                .repo("string")
                .build())
            .pypi(JobLibraryPypiArgs.builder()
                .package_("string")
                .repo("string")
                .build())
            .requirements("string")
            .whl("string")
            .build())
        .maxConcurrentRuns(0)
        .name("string")
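        // Job-level cluster spec (legacy single-task job style).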
        .newCluster(JobNewClusterArgs.builder()
            .sparkVersion("string")
            .enableLocalDiskEncryption(false)
            .clusterLogConf(JobNewClusterClusterLogConfArgs.builder()
                .dbfs(JobNewClusterClusterLogConfDbfsArgs.builder()
                    .destination("string")
                    .build())
                .s3(JobNewClusterClusterLogConfS3Args.builder()
                    .destination("string")
                    .cannedAcl("string")
                    .enableEncryption(false)
                    .encryptionType("string")
                    .endpoint("string")
                    .kmsKey("string")
                    .region("string")
                    .build())
                .build())
            .gcpAttributes(JobNewClusterGcpAttributesArgs.builder()
                .availability("string")
                .bootDiskSize(0)
                .googleServiceAccount("string")
                .localSsdCount(0)
                .usePreemptibleExecutors(false)
                .zoneId("string")
                .build())
            .clusterId("string")
            .idempotencyToken("string")
            .clusterMountInfos(JobNewClusterClusterMountInfoArgs.builder()
                .localMountDirPath("string")
                .networkFilesystemInfo(JobNewClusterClusterMountInfoNetworkFilesystemInfoArgs.builder()
                    .serverAddress("string")
                    .mountOptions("string")
                    .build())
                .remoteMountDirPath("string")
                .build())
            .clusterName("string")
            .customTags(Map.of("string", "string"))
            .dataSecurityMode("string")
            .dockerImage(JobNewClusterDockerImageArgs.builder()
                .url("string")
                .basicAuth(JobNewClusterDockerImageBasicAuthArgs.builder()
                    .password("string")
                    .username("string")
                    .build())
                .build())
            .driverInstancePoolId("string")
            .initScripts(JobNewClusterInitScriptArgs.builder()
                .abfss(JobNewClusterInitScriptAbfssArgs.builder()
                    .destination("string")
                    .build())
                .file(JobNewClusterInitScriptFileArgs.builder()
                    .destination("string")
                    .build())
                .gcs(JobNewClusterInitScriptGcsArgs.builder()
                    .destination("string")
                    .build())
                .s3(JobNewClusterInitScriptS3Args.builder()
                    .destination("string")
                    .cannedAcl("string")
                    .enableEncryption(false)
                    .encryptionType("string")
                    .endpoint("string")
                    .kmsKey("string")
                    .region("string")
                    .build())
                .volumes(JobNewClusterInitScriptVolumesArgs.builder()
                    .destination("string")
                    .build())
                .workspace(JobNewClusterInitScriptWorkspaceArgs.builder()
                    .destination("string")
                    .build())
                .build())
            .enableElasticDisk(false)
            .applyPolicyDefaultValues(false)
            .azureAttributes(JobNewClusterAzureAttributesArgs.builder()
                .availability("string")
                .firstOnDemand(0)
                .logAnalyticsInfo(JobNewClusterAzureAttributesLogAnalyticsInfoArgs.builder()
                    .logAnalyticsPrimaryKey("string")
                    .logAnalyticsWorkspaceId("string")
                    .build())
                .spotBidMaxPrice(0)
                .build())
            .awsAttributes(JobNewClusterAwsAttributesArgs.builder()
                .availability("string")
                .ebsVolumeCount(0)
                .ebsVolumeIops(0)
                .ebsVolumeSize(0)
                .ebsVolumeThroughput(0)
                .ebsVolumeType("string")
                .firstOnDemand(0)
                .instanceProfileArn("string")
                .spotBidPricePercent(0)
                .zoneId("string")
                .build())
            .driverNodeTypeId("string")
            .instancePoolId("string")
            .libraries(JobNewClusterLibraryArgs.builder()
                .cran(JobNewClusterLibraryCranArgs.builder()
                    .package_("string")
                    .repo("string")
                    .build())
                .egg("string")
                .jar("string")
                .maven(JobNewClusterLibraryMavenArgs.builder()
                    .coordinates("string")
                    .exclusions("string")
                    .repo("string")
                    .build())
                .pypi(JobNewClusterLibraryPypiArgs.builder()
                    .package_("string")
                    .repo("string")
                    .build())
                .requirements("string")
                .whl("string")
                .build())
            .nodeTypeId("string")
            .numWorkers(0)
            .policyId("string")
            .runtimeEngine("string")
            .singleUserName("string")
            .sparkConf(Map.of("string", "string"))
            .sparkEnvVars(Map.of("string", "string"))
            .autoscale(JobNewClusterAutoscaleArgs.builder()
                .maxWorkers(0)
                .minWorkers(0)
                .build())
            .sshPublicKeys("string")
            .workloadType(JobNewClusterWorkloadTypeArgs.builder()
                .clients(JobNewClusterWorkloadTypeClientsArgs.builder()
                    .jobs(false)
                    .notebooks(false)
                    .build())
                .build())
            .build())
        .notificationSettings(JobNotificationSettingsArgs.builder()
            .noAlertForCanceledRuns(false)
            .noAlertForSkippedRuns(false)
            .build())
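        // Job parameters with defaults; individual runs may override them.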
        .parameters(JobParameterArgs.builder()
            .default_("string")
            .name("string")
            .build())
        .queue(JobQueueArgs.builder()
            .enabled(false)
            .build())
        .runAs(JobRunAsArgs.builder()
            .servicePrincipalName("string")
            .userName("string")
            .build())
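        // Quartz cron schedule; pauseStatus may be PAUSED or UNPAUSED.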
        .schedule(JobScheduleArgs.builder()
            .quartzCronExpression("string")
            .timezoneId("string")
            .pauseStatus("string")
            .build())
        .tags(Map.of("string", "string"))
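        // One builder per task; dependsOns wires tasks into a DAG.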
        .tasks(JobTaskArgs.builder()
            .taskKey("string")
            .newCluster(JobTaskNewClusterArgs.builder()
                .sparkVersion("string")
                .enableLocalDiskEncryption(false)
                .clusterLogConf(JobTaskNewClusterClusterLogConfArgs.builder()
                    .dbfs(JobTaskNewClusterClusterLogConfDbfsArgs.builder()
                        .destination("string")
                        .build())
                    .s3(JobTaskNewClusterClusterLogConfS3Args.builder()
                        .destination("string")
                        .cannedAcl("string")
                        .enableEncryption(false)
                        .encryptionType("string")
                        .endpoint("string")
                        .kmsKey("string")
                        .region("string")
                        .build())
                    .build())
                .gcpAttributes(JobTaskNewClusterGcpAttributesArgs.builder()
                    .availability("string")
                    .bootDiskSize(0)
                    .googleServiceAccount("string")
                    .localSsdCount(0)
                    .usePreemptibleExecutors(false)
                    .zoneId("string")
                    .build())
                .clusterId("string")
                .idempotencyToken("string")
                .clusterMountInfos(JobTaskNewClusterClusterMountInfoArgs.builder()
                    .localMountDirPath("string")
                    .networkFilesystemInfo(JobTaskNewClusterClusterMountInfoNetworkFilesystemInfoArgs.builder()
                        .serverAddress("string")
                        .mountOptions("string")
                        .build())
                    .remoteMountDirPath("string")
                    .build())
                .clusterName("string")
                .customTags(Map.of("string", "string"))
                .dataSecurityMode("string")
                .dockerImage(JobTaskNewClusterDockerImageArgs.builder()
                    .url("string")
                    .basicAuth(JobTaskNewClusterDockerImageBasicAuthArgs.builder()
                        .password("string")
                        .username("string")
                        .build())
                    .build())
                .driverInstancePoolId("string")
                .initScripts(JobTaskNewClusterInitScriptArgs.builder()
                    .abfss(JobTaskNewClusterInitScriptAbfssArgs.builder()
                        .destination("string")
                        .build())
                    .file(JobTaskNewClusterInitScriptFileArgs.builder()
                        .destination("string")
                        .build())
                    .gcs(JobTaskNewClusterInitScriptGcsArgs.builder()
                        .destination("string")
                        .build())
                    .s3(JobTaskNewClusterInitScriptS3Args.builder()
                        .destination("string")
                        .cannedAcl("string")
                        .enableEncryption(false)
                        .encryptionType("string")
                        .endpoint("string")
                        .kmsKey("string")
                        .region("string")
                        .build())
                    .volumes(JobTaskNewClusterInitScriptVolumesArgs.builder()
                        .destination("string")
                        .build())
                    .workspace(JobTaskNewClusterInitScriptWorkspaceArgs.builder()
                        .destination("string")
                        .build())
                    .build())
                .enableElasticDisk(false)
                .applyPolicyDefaultValues(false)
                .azureAttributes(JobTaskNewClusterAzureAttributesArgs.builder()
                    .availability("string")
                    .firstOnDemand(0)
                    .logAnalyticsInfo(JobTaskNewClusterAzureAttributesLogAnalyticsInfoArgs.builder()
                        .logAnalyticsPrimaryKey("string")
                        .logAnalyticsWorkspaceId("string")
                        .build())
                    .spotBidMaxPrice(0)
                    .build())
                .awsAttributes(JobTaskNewClusterAwsAttributesArgs.builder()
                    .availability("string")
                    .ebsVolumeCount(0)
                    .ebsVolumeIops(0)
                    .ebsVolumeSize(0)
                    .ebsVolumeThroughput(0)
                    .ebsVolumeType("string")
                    .firstOnDemand(0)
                    .instanceProfileArn("string")
                    .spotBidPricePercent(0)
                    .zoneId("string")
                    .build())
                .driverNodeTypeId("string")
                .instancePoolId("string")
                .libraries(JobTaskNewClusterLibraryArgs.builder()
                    .cran(JobTaskNewClusterLibraryCranArgs.builder()
                        .package_("string")
                        .repo("string")
                        .build())
                    .egg("string")
                    .jar("string")
                    .maven(JobTaskNewClusterLibraryMavenArgs.builder()
                        .coordinates("string")
                        .exclusions("string")
                        .repo("string")
                        .build())
                    .pypi(JobTaskNewClusterLibraryPypiArgs.builder()
                        .package_("string")
                        .repo("string")
                        .build())
                    .requirements("string")
                    .whl("string")
                    .build())
                .nodeTypeId("string")
                .numWorkers(0)
                .policyId("string")
                .runtimeEngine("string")
                .singleUserName("string")
                .sparkConf(Map.of("string", "string"))
                .sparkEnvVars(Map.of("string", "string"))
                .autoscale(JobTaskNewClusterAutoscaleArgs.builder()
                    .maxWorkers(0)
                    .minWorkers(0)
                    .build())
                .sshPublicKeys("string")
                .workloadType(JobTaskNewClusterWorkloadTypeArgs.builder()
                    .clients(JobTaskNewClusterWorkloadTypeClientsArgs.builder()
                        .jobs(false)
                        .notebooks(false)
                        .build())
                    .build())
                .build())
            .dbtTask(JobTaskDbtTaskArgs.builder()
                .commands("string")
                .catalog("string")
                .profilesDirectory("string")
                .projectDirectory("string")
                .schema("string")
                .source("string")
                .warehouseId("string")
                .build())
            .description("string")
            .disableAutoOptimization(false)
            .emailNotifications(JobTaskEmailNotificationsArgs.builder()
                .noAlertForSkippedRuns(false)
                .onDurationWarningThresholdExceededs("string")
                .onFailures("string")
                .onStarts("string")
                .onStreamingBacklogExceededs("string")
                .onSuccesses("string")
                .build())
            .environmentKey("string")
            .existingClusterId("string")
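            // Fans the nested task out over each entry in inputs.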
            .forEachTask(JobTaskForEachTaskArgs.builder()
                .inputs("string")
                .task(JobTaskForEachTaskTaskArgs.builder()
                    .taskKey("string")
                    .notebookTask(JobTaskForEachTaskTaskNotebookTaskArgs.builder()
                        .notebookPath("string")
                        .baseParameters(Map.of("string", "string"))
                        .source("string")
                        .warehouseId("string")
                        .build())
                    .webhookNotifications(JobTaskForEachTaskTaskWebhookNotificationsArgs.builder()
                        .onDurationWarningThresholdExceededs(JobTaskForEachTaskTaskWebhookNotificationsOnDurationWarningThresholdExceededArgs.builder()
                            .id("string")
                            .build())
                        .onFailures(JobTaskForEachTaskTaskWebhookNotificationsOnFailureArgs.builder()
                            .id("string")
                            .build())
                        .onStarts(JobTaskForEachTaskTaskWebhookNotificationsOnStartArgs.builder()
                            .id("string")
                            .build())
                        .onStreamingBacklogExceededs(JobTaskForEachTaskTaskWebhookNotificationsOnStreamingBacklogExceededArgs.builder()
                            .id("string")
                            .build())
                        .onSuccesses(JobTaskForEachTaskTaskWebhookNotificationsOnSuccessArgs.builder()
                            .id("string")
                            .build())
                        .build())
                    .newCluster(JobTaskForEachTaskTaskNewClusterArgs.builder()
                        .sparkVersion("string")
                        .enableLocalDiskEncryption(false)
                        .clusterLogConf(JobTaskForEachTaskTaskNewClusterClusterLogConfArgs.builder()
                            .dbfs(JobTaskForEachTaskTaskNewClusterClusterLogConfDbfsArgs.builder()
                                .destination("string")
                                .build())
                            .s3(JobTaskForEachTaskTaskNewClusterClusterLogConfS3Args.builder()
                                .destination("string")
                                .cannedAcl("string")
                                .enableEncryption(false)
                                .encryptionType("string")
                                .endpoint("string")
                                .kmsKey("string")
                                .region("string")
                                .build())
                            .build())
                        .gcpAttributes(JobTaskForEachTaskTaskNewClusterGcpAttributesArgs.builder()
                            .availability("string")
                            .bootDiskSize(0)
                            .googleServiceAccount("string")
                            .localSsdCount(0)
                            .usePreemptibleExecutors(false)
                            .zoneId("string")
                            .build())
                        .clusterId("string")
                        .idempotencyToken("string")
                        .clusterMountInfos(JobTaskForEachTaskTaskNewClusterClusterMountInfoArgs.builder()
                            .localMountDirPath("string")
                            .networkFilesystemInfo(JobTaskForEachTaskTaskNewClusterClusterMountInfoNetworkFilesystemInfoArgs.builder()
                                .serverAddress("string")
                                .mountOptions("string")
                                .build())
                            .remoteMountDirPath("string")
                            .build())
                        .clusterName("string")
                        .customTags(Map.of("string", "string"))
                        .dataSecurityMode("string")
                        .dockerImage(JobTaskForEachTaskTaskNewClusterDockerImageArgs.builder()
                            .url("string")
                            .basicAuth(JobTaskForEachTaskTaskNewClusterDockerImageBasicAuthArgs.builder()
                                .password("string")
                                .username("string")
                                .build())
                            .build())
                        .driverInstancePoolId("string")
                        .initScripts(JobTaskForEachTaskTaskNewClusterInitScriptArgs.builder()
                            .abfss(JobTaskForEachTaskTaskNewClusterInitScriptAbfssArgs.builder()
                                .destination("string")
                                .build())
                            .file(JobTaskForEachTaskTaskNewClusterInitScriptFileArgs.builder()
                                .destination("string")
                                .build())
                            .gcs(JobTaskForEachTaskTaskNewClusterInitScriptGcsArgs.builder()
                                .destination("string")
                                .build())
                            .s3(JobTaskForEachTaskTaskNewClusterInitScriptS3Args.builder()
                                .destination("string")
                                .cannedAcl("string")
                                .enableEncryption(false)
                                .encryptionType("string")
                                .endpoint("string")
                                .kmsKey("string")
                                .region("string")
                                .build())
                            .volumes(JobTaskForEachTaskTaskNewClusterInitScriptVolumesArgs.builder()
                                .destination("string")
                                .build())
                            .workspace(JobTaskForEachTaskTaskNewClusterInitScriptWorkspaceArgs.builder()
                                .destination("string")
                                .build())
                            .build())
                        .enableElasticDisk(false)
                        .applyPolicyDefaultValues(false)
                        .azureAttributes(JobTaskForEachTaskTaskNewClusterAzureAttributesArgs.builder()
                            .availability("string")
                            .firstOnDemand(0)
                            .logAnalyticsInfo(JobTaskForEachTaskTaskNewClusterAzureAttributesLogAnalyticsInfoArgs.builder()
                                .logAnalyticsPrimaryKey("string")
                                .logAnalyticsWorkspaceId("string")
                                .build())
                            .spotBidMaxPrice(0)
                            .build())
                        .awsAttributes(JobTaskForEachTaskTaskNewClusterAwsAttributesArgs.builder()
                            .availability("string")
                            .ebsVolumeCount(0)
                            .ebsVolumeIops(0)
                            .ebsVolumeSize(0)
                            .ebsVolumeThroughput(0)
                            .ebsVolumeType("string")
                            .firstOnDemand(0)
                            .instanceProfileArn("string")
                            .spotBidPricePercent(0)
                            .zoneId("string")
                            .build())
                        .driverNodeTypeId("string")
                        .instancePoolId("string")
                        .libraries(JobTaskForEachTaskTaskNewClusterLibraryArgs.builder()
                            .cran(JobTaskForEachTaskTaskNewClusterLibraryCranArgs.builder()
                                .package_("string")
                                .repo("string")
                                .build())
                            .egg("string")
                            .jar("string")
                            .maven(JobTaskForEachTaskTaskNewClusterLibraryMavenArgs.builder()
                                .coordinates("string")
                                .exclusions("string")
                                .repo("string")
                                .build())
                            .pypi(JobTaskForEachTaskTaskNewClusterLibraryPypiArgs.builder()
                                .package_("string")
                                .repo("string")
                                .build())
                            .requirements("string")
                            .whl("string")
                            .build())
                        .nodeTypeId("string")
                        .numWorkers(0)
                        .policyId("string")
                        .runtimeEngine("string")
                        .singleUserName("string")
                        .sparkConf(Map.of("string", "string"))
                        .sparkEnvVars(Map.of("string", "string"))
                        .autoscale(JobTaskForEachTaskTaskNewClusterAutoscaleArgs.builder()
                            .maxWorkers(0)
                            .minWorkers(0)
                            .build())
                        .sshPublicKeys("string")
                        .workloadType(JobTaskForEachTaskTaskNewClusterWorkloadTypeArgs.builder()
                            .clients(JobTaskForEachTaskTaskNewClusterWorkloadTypeClientsArgs.builder()
                                .jobs(false)
                                .notebooks(false)
                                .build())
                            .build())
                        .build())
                    .disableAutoOptimization(false)
                    .emailNotifications(JobTaskForEachTaskTaskEmailNotificationsArgs.builder()
                        .noAlertForSkippedRuns(false)
                        .onDurationWarningThresholdExceededs("string")
                        .onFailures("string")
                        .onStarts("string")
                        .onStreamingBacklogExceededs("string")
                        .onSuccesses("string")
                        .build())
                    .environmentKey("string")
                    .existingClusterId("string")
                    .health(JobTaskForEachTaskTaskHealthArgs.builder()
                        .rules(JobTaskForEachTaskTaskHealthRuleArgs.builder()
                            .metric("string")
                            .op("string")
                            .value(0)
                            .build())
                        .build())
                    .jobClusterKey("string")
                    .libraries(JobTaskForEachTaskTaskLibraryArgs.builder()
                        .cran(JobTaskForEachTaskTaskLibraryCranArgs.builder()
                            .package_("string")
                            .repo("string")
                            .build())
                        .egg("string")
                        .jar("string")
                        .maven(JobTaskForEachTaskTaskLibraryMavenArgs.builder()
                            .coordinates("string")
                            .exclusions("string")
                            .repo("string")
                            .build())
                        .pypi(JobTaskForEachTaskTaskLibraryPypiArgs.builder()
                            .package_("string")
                            .repo("string")
                            .build())
                        .requirements("string")
                        .whl("string")
                        .build())
                    .maxRetries(0)
                    .minRetryIntervalMillis(0)
                    .description("string")
                    .dependsOns(JobTaskForEachTaskTaskDependsOnArgs.builder()
                        .taskKey("string")
                        .outcome("string")
                        .build())
                    .sparkPythonTask(JobTaskForEachTaskTaskSparkPythonTaskArgs.builder()
                        .pythonFile("string")
                        .parameters("string")
                        .source("string")
                        .build())
                    .pipelineTask(JobTaskForEachTaskTaskPipelineTaskArgs.builder()
                        .pipelineId("string")
                        .fullRefresh(false)
                        .build())
                    .pythonWheelTask(JobTaskForEachTaskTaskPythonWheelTaskArgs.builder()
                        .entryPoint("string")
                        .namedParameters(Map.of("string", "string"))
                        .packageName("string")
                        .parameters("string")
                        .build())
                    .retryOnTimeout(false)
                    .runIf("string")
                    .runJobTask(JobTaskForEachTaskTaskRunJobTaskArgs.builder()
                        .jobId(0)
                        .dbtCommands("string")
                        .jarParams("string")
                        .jobParameters(Map.of("string", "string"))
                        .notebookParams(Map.of("string", "string"))
                        .pipelineParams(JobTaskForEachTaskTaskRunJobTaskPipelineParamsArgs.builder()
                            .fullRefresh(false)
                            .build())
                        .pythonNamedParams(Map.of("string", "string"))
                        .pythonParams("string")
                        .sparkSubmitParams("string")
                        .sqlParams(Map.of("string", "string"))
                        .build())
                    .sparkJarTask(JobTaskForEachTaskTaskSparkJarTaskArgs.builder()
                        .jarUri("string")
                        .mainClassName("string")
                        .parameters("string")
                        .build())
                    .notificationSettings(JobTaskForEachTaskTaskNotificationSettingsArgs.builder()
                        .alertOnLastAttempt(false)
                        .noAlertForCanceledRuns(false)
                        .noAlertForSkippedRuns(false)
                        .build())
                    .sparkSubmitTask(JobTaskForEachTaskTaskSparkSubmitTaskArgs.builder()
                        .parameters("string")
                        .build())
                    .sqlTask(JobTaskForEachTaskTaskSqlTaskArgs.builder()
                        .warehouseId("string")
                        .alert(JobTaskForEachTaskTaskSqlTaskAlertArgs.builder()
                            .alertId("string")
                            .subscriptions(JobTaskForEachTaskTaskSqlTaskAlertSubscriptionArgs.builder()
                                .destinationId("string")
                                .userName("string")
                                .build())
                            .pauseSubscriptions(false)
                            .build())
                        .dashboard(JobTaskForEachTaskTaskSqlTaskDashboardArgs.builder()
                            .dashboardId("string")
                            .customSubject("string")
                            .pauseSubscriptions(false)
                            .subscriptions(JobTaskForEachTaskTaskSqlTaskDashboardSubscriptionArgs.builder()
                                .destinationId("string")
                                .userName("string")
                                .build())
                            .build())
                        .file(JobTaskForEachTaskTaskSqlTaskFileArgs.builder()
                            .path("string")
                            .source("string")
                            .build())
                        .parameters(Map.of("string", "string"))
                        .query(JobTaskForEachTaskTaskSqlTaskQueryArgs.builder()
                            .queryId("string")
                            .build())
                        .build())
                    .dbtTask(JobTaskForEachTaskTaskDbtTaskArgs.builder()
                        .commands("string")
                        .catalog("string")
                        .profilesDirectory("string")
                        .projectDirectory("string")
                        .schema("string")
                        .source("string")
                        .warehouseId("string")
                        .build())
                    .timeoutSeconds(0)
                    .conditionTask(JobTaskForEachTaskTaskConditionTaskArgs.builder()
                        .left("string")
                        .op("string")
                        .right("string")
                        .build())
                    .build())
                .concurrency(0)
                .build())
            .health(JobTaskHealthArgs.builder()
                .rules(JobTaskHealthRuleArgs.builder()
                    .metric("string")
                    .op("string")
                    .value(0)
                    .build())
                .build())
            .jobClusterKey("string")
            .libraries(JobTaskLibraryArgs.builder()
                .cran(JobTaskLibraryCranArgs.builder()
                    .package_("string")
                    .repo("string")
                    .build())
                .egg("string")
                .jar("string")
                .maven(JobTaskLibraryMavenArgs.builder()
                    .coordinates("string")
                    .exclusions("string")
                    .repo("string")
                    .build())
                .pypi(JobTaskLibraryPypiArgs.builder()
                    .package_("string")
                    .repo("string")
                    .build())
                .requirements("string")
                .whl("string")
                .build())
            .maxRetries(0)
            .webhookNotifications(JobTaskWebhookNotificationsArgs.builder()
                .onDurationWarningThresholdExceededs(JobTaskWebhookNotificationsOnDurationWarningThresholdExceededArgs.builder()
                    .id("string")
                    .build())
                .onFailures(JobTaskWebhookNotificationsOnFailureArgs.builder()
                    .id("string")
                    .build())
                .onStarts(JobTaskWebhookNotificationsOnStartArgs.builder()
                    .id("string")
                    .build())
                .onStreamingBacklogExceededs(JobTaskWebhookNotificationsOnStreamingBacklogExceededArgs.builder()
                    .id("string")
                    .build())
                .onSuccesses(JobTaskWebhookNotificationsOnSuccessArgs.builder()
                    .id("string")
                    .build())
                .build())
            .dependsOns(JobTaskDependsOnArgs.builder()
                .taskKey("string")
                .outcome("string")
                .build())
            .retryOnTimeout(false)
            .notificationSettings(JobTaskNotificationSettingsArgs.builder()
                .alertOnLastAttempt(false)
                .noAlertForCanceledRuns(false)
                .noAlertForSkippedRuns(false)
                .build())
            .pipelineTask(JobTaskPipelineTaskArgs.builder()
                .pipelineId("string")
                .fullRefresh(false)
                .build())
            .pythonWheelTask(JobTaskPythonWheelTaskArgs.builder()
                .entryPoint("string")
                .namedParameters(Map.of("string", "string"))
                .packageName("string")
                .parameters("string")
                .build())
            .notebookTask(JobTaskNotebookTaskArgs.builder()
                .notebookPath("string")
                .baseParameters(Map.of("string", "string"))
                .source("string")
                .warehouseId("string")
                .build())
            .runIf("string")
            .runJobTask(JobTaskRunJobTaskArgs.builder()
                .jobId(0)
                .dbtCommands("string")
                .jarParams("string")
                .jobParameters(Map.of("string", "string"))
                .notebookParams(Map.of("string", "string"))
                .pipelineParams(JobTaskRunJobTaskPipelineParamsArgs.builder()
                    .fullRefresh(false)
                    .build())
                .pythonNamedParams(Map.of("string", "string"))
                .pythonParams("string")
                .sparkSubmitParams("string")
                .sqlParams(Map.of("string", "string"))
                .build())
            .sparkJarTask(JobTaskSparkJarTaskArgs.builder()
                .jarUri("string")
                .mainClassName("string")
                .parameters("string")
                .build())
            .sparkPythonTask(JobTaskSparkPythonTaskArgs.builder()
                .pythonFile("string")
                .parameters("string")
                .source("string")
                .build())
            .sparkSubmitTask(JobTaskSparkSubmitTaskArgs.builder()
                .parameters("string")
                .build())
            .sqlTask(JobTaskSqlTaskArgs.builder()
                .warehouseId("string")
                .alert(JobTaskSqlTaskAlertArgs.builder()
                    .alertId("string")
                    .subscriptions(JobTaskSqlTaskAlertSubscriptionArgs.builder()
                        .destinationId("string")
                        .userName("string")
                        .build())
                    .pauseSubscriptions(false)
                    .build())
                .dashboard(JobTaskSqlTaskDashboardArgs.builder()
                    .dashboardId("string")
                    .customSubject("string")
                    .pauseSubscriptions(false)
                    .subscriptions(JobTaskSqlTaskDashboardSubscriptionArgs.builder()
                        .destinationId("string")
                        .userName("string")
                        .build())
                    .build())
                .file(JobTaskSqlTaskFileArgs.builder()
                    .path("string")
                    .source("string")
                    .build())
                .parameters(Map.of("string", "string"))
                .query(JobTaskSqlTaskQueryArgs.builder()
                    .queryId("string")
                    .build())
                .build())
            .conditionTask(JobTaskConditionTaskArgs.builder()
                .left("string")
                .op("string")
                .right("string")
                .build())
            .timeoutSeconds(0)
            .minRetryIntervalMillis(0)
            .build())
        .timeoutSeconds(0)
        .trigger(JobTriggerArgs.builder()
            .fileArrival(JobTriggerFileArrivalArgs.builder()
                .url("string")
                .minTimeBetweenTriggersSeconds(0)
                .waitAfterLastChangeSeconds(0)
                .build())
            .pauseStatus("string")
            .periodic(JobTriggerPeriodicArgs.builder()
                .interval(0)
                .unit("string")
                .build())
            .table(JobTriggerTableArgs.builder()
                .condition("string")
                .minTimeBetweenTriggersSeconds(0)
                .tableNames("string")
                .waitAfterLastChangeSeconds(0)
                .build())
            .tableUpdate(JobTriggerTableUpdateArgs.builder()
                .tableNames("string")
                .condition("string")
                .minTimeBetweenTriggersSeconds(0)
                .waitAfterLastChangeSeconds(0)
                .build())
            .build())
        .webhookNotifications(JobWebhookNotificationsArgs.builder()
            .onDurationWarningThresholdExceededs(JobWebhookNotificationsOnDurationWarningThresholdExceededArgs.builder()
                .id("string")
                .build())
            .onFailures(JobWebhookNotificationsOnFailureArgs.builder()
                .id("string")
                .build())
            .onStarts(JobWebhookNotificationsOnStartArgs.builder()
                .id("string")
                .build())
            .onStreamingBacklogExceededs(JobWebhookNotificationsOnStreamingBacklogExceededArgs.builder()
                .id("string")
                .build())
            .onSuccesses(JobWebhookNotificationsOnSuccessArgs.builder()
                .id("string")
                .build())
            .build())
        .build());
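
    The skeletons above and below enumerate every accepted argument with placeholder values; a working job typically sets only a handful of them. As a quick orientation, here is a minimal sketch in Java of a single-task notebook job on an ephemeral cluster. The Spark version, node type, and notebook path are illustrative assumptions, not values prescribed by the provider; substitute ones valid in your workspace.

    import com.pulumi.Pulumi;
    import com.pulumi.databricks.Job;
    import com.pulumi.databricks.JobArgs;
    import com.pulumi.databricks.inputs.JobTaskArgs;
    import com.pulumi.databricks.inputs.JobTaskNewClusterArgs;
    import com.pulumi.databricks.inputs.JobTaskNotebookTaskArgs;

    public class App {
        public static void main(String[] args) {
            Pulumi.run(ctx -> {
                // One task, one small ephemeral cluster, one notebook.
                new Job("minimal", JobArgs.builder()
                    .name("Minimal notebook job")
                    .tasks(JobTaskArgs.builder()
                        .taskKey("main")
                        .newCluster(JobTaskNewClusterArgs.builder()
                            .numWorkers(1)
                            .sparkVersion("14.3.x-scala2.12") // assumed runtime; pick one available in your workspace
                            .nodeTypeId("i3.xlarge")          // assumed node type; cloud-specific
                            .build())
                        .notebookTask(JobTaskNotebookTaskArgs.builder()
                            .notebookPath("/Shared/example") // assumed workspace path
                            .build())
                        .build())
                    .build());
            });
        }
    }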
    
    job_resource = databricks.Job("jobResource",
        continuous=databricks.JobContinuousArgs(
            pause_status="string",
        ),
        control_run_state=False,
        deployment=databricks.JobDeploymentArgs(
            kind="string",
            metadata_file_path="string",
        ),
        description="string",
        edit_mode="string",
        email_notifications=databricks.JobEmailNotificationsArgs(
            no_alert_for_skipped_runs=False,
            on_duration_warning_threshold_exceededs=["string"],
            on_failures=["string"],
            on_starts=["string"],
            on_streaming_backlog_exceededs=["string"],
            on_successes=["string"],
        ),
        environments=[databricks.JobEnvironmentArgs(
            environment_key="string",
            spec=databricks.JobEnvironmentSpecArgs(
                client="string",
                dependencies=["string"],
            ),
        )],
        existing_cluster_id="string",
        format="string",
        git_source=databricks.JobGitSourceArgs(
            url="string",
            branch="string",
            commit="string",
            git_snapshot=databricks.JobGitSourceGitSnapshotArgs(
                used_commit="string",
            ),
            job_source=databricks.JobGitSourceJobSourceArgs(
                import_from_git_branch="string",
                job_config_path="string",
                dirty_state="string",
            ),
            provider="string",
            tag="string",
        ),
        health=databricks.JobHealthArgs(
            rules=[databricks.JobHealthRuleArgs(
                metric="string",
                op="string",
                value=0,
            )],
        ),
        job_clusters=[databricks.JobJobClusterArgs(
            job_cluster_key="string",
            new_cluster=databricks.JobJobClusterNewClusterArgs(
                spark_version="string",
                enable_local_disk_encryption=False,
                cluster_log_conf=databricks.JobJobClusterNewClusterClusterLogConfArgs(
                    dbfs=databricks.JobJobClusterNewClusterClusterLogConfDbfsArgs(
                        destination="string",
                    ),
                    s3=databricks.JobJobClusterNewClusterClusterLogConfS3Args(
                        destination="string",
                        canned_acl="string",
                        enable_encryption=False,
                        encryption_type="string",
                        endpoint="string",
                        kms_key="string",
                        region="string",
                    ),
                ),
                gcp_attributes=databricks.JobJobClusterNewClusterGcpAttributesArgs(
                    availability="string",
                    boot_disk_size=0,
                    google_service_account="string",
                    local_ssd_count=0,
                    use_preemptible_executors=False,
                    zone_id="string",
                ),
                cluster_id="string",
                idempotency_token="string",
                cluster_mount_infos=[databricks.JobJobClusterNewClusterClusterMountInfoArgs(
                    local_mount_dir_path="string",
                    network_filesystem_info=databricks.JobJobClusterNewClusterClusterMountInfoNetworkFilesystemInfoArgs(
                        server_address="string",
                        mount_options="string",
                    ),
                    remote_mount_dir_path="string",
                )],
                cluster_name="string",
                custom_tags={
                    "string": "string",
                },
                data_security_mode="string",
                docker_image=databricks.JobJobClusterNewClusterDockerImageArgs(
                    url="string",
                    basic_auth=databricks.JobJobClusterNewClusterDockerImageBasicAuthArgs(
                        password="string",
                        username="string",
                    ),
                ),
                driver_instance_pool_id="string",
                init_scripts=[databricks.JobJobClusterNewClusterInitScriptArgs(
                    abfss=databricks.JobJobClusterNewClusterInitScriptAbfssArgs(
                        destination="string",
                    ),
                    file=databricks.JobJobClusterNewClusterInitScriptFileArgs(
                        destination="string",
                    ),
                    gcs=databricks.JobJobClusterNewClusterInitScriptGcsArgs(
                        destination="string",
                    ),
                    s3=databricks.JobJobClusterNewClusterInitScriptS3Args(
                        destination="string",
                        canned_acl="string",
                        enable_encryption=False,
                        encryption_type="string",
                        endpoint="string",
                        kms_key="string",
                        region="string",
                    ),
                    volumes=databricks.JobJobClusterNewClusterInitScriptVolumesArgs(
                        destination="string",
                    ),
                    workspace=databricks.JobJobClusterNewClusterInitScriptWorkspaceArgs(
                        destination="string",
                    ),
                )],
                enable_elastic_disk=False,
                apply_policy_default_values=False,
                azure_attributes=databricks.JobJobClusterNewClusterAzureAttributesArgs(
                    availability="string",
                    first_on_demand=0,
                    log_analytics_info=databricks.JobJobClusterNewClusterAzureAttributesLogAnalyticsInfoArgs(
                        log_analytics_primary_key="string",
                        log_analytics_workspace_id="string",
                    ),
                    spot_bid_max_price=0,
                ),
                aws_attributes=databricks.JobJobClusterNewClusterAwsAttributesArgs(
                    availability="string",
                    ebs_volume_count=0,
                    ebs_volume_iops=0,
                    ebs_volume_size=0,
                    ebs_volume_throughput=0,
                    ebs_volume_type="string",
                    first_on_demand=0,
                    instance_profile_arn="string",
                    spot_bid_price_percent=0,
                    zone_id="string",
                ),
                driver_node_type_id="string",
                instance_pool_id="string",
                libraries=[databricks.JobJobClusterNewClusterLibraryArgs(
                    cran=databricks.JobJobClusterNewClusterLibraryCranArgs(
                        package="string",
                        repo="string",
                    ),
                    egg="string",
                    jar="string",
                    maven=databricks.JobJobClusterNewClusterLibraryMavenArgs(
                        coordinates="string",
                        exclusions=["string"],
                        repo="string",
                    ),
                    pypi=databricks.JobJobClusterNewClusterLibraryPypiArgs(
                        package="string",
                        repo="string",
                    ),
                    requirements="string",
                    whl="string",
                )],
                node_type_id="string",
                num_workers=0,
                policy_id="string",
                runtime_engine="string",
                single_user_name="string",
                spark_conf={
                    "string": "string",
                },
                spark_env_vars={
                    "string": "string",
                },
                autoscale=databricks.JobJobClusterNewClusterAutoscaleArgs(
                    max_workers=0,
                    min_workers=0,
                ),
                ssh_public_keys=["string"],
                workload_type=databricks.JobJobClusterNewClusterWorkloadTypeArgs(
                    clients=databricks.JobJobClusterNewClusterWorkloadTypeClientsArgs(
                        jobs=False,
                        notebooks=False,
                    ),
                ),
            ),
        )],
        libraries=[databricks.JobLibraryArgs(
            cran=databricks.JobLibraryCranArgs(
                package="string",
                repo="string",
            ),
            egg="string",
            jar="string",
            maven=databricks.JobLibraryMavenArgs(
                coordinates="string",
                exclusions=["string"],
                repo="string",
            ),
            pypi=databricks.JobLibraryPypiArgs(
                package="string",
                repo="string",
            ),
            requirements="string",
            whl="string",
        )],
        max_concurrent_runs=0,
        name="string",
        new_cluster=databricks.JobNewClusterArgs(
            spark_version="string",
            enable_local_disk_encryption=False,
            cluster_log_conf=databricks.JobNewClusterClusterLogConfArgs(
                dbfs=databricks.JobNewClusterClusterLogConfDbfsArgs(
                    destination="string",
                ),
                s3=databricks.JobNewClusterClusterLogConfS3Args(
                    destination="string",
                    canned_acl="string",
                    enable_encryption=False,
                    encryption_type="string",
                    endpoint="string",
                    kms_key="string",
                    region="string",
                ),
            ),
            gcp_attributes=databricks.JobNewClusterGcpAttributesArgs(
                availability="string",
                boot_disk_size=0,
                google_service_account="string",
                local_ssd_count=0,
                use_preemptible_executors=False,
                zone_id="string",
            ),
            cluster_id="string",
            idempotency_token="string",
            cluster_mount_infos=[databricks.JobNewClusterClusterMountInfoArgs(
                local_mount_dir_path="string",
                network_filesystem_info=databricks.JobNewClusterClusterMountInfoNetworkFilesystemInfoArgs(
                    server_address="string",
                    mount_options="string",
                ),
                remote_mount_dir_path="string",
            )],
            cluster_name="string",
            custom_tags={
                "string": "string",
            },
            data_security_mode="string",
            docker_image=databricks.JobNewClusterDockerImageArgs(
                url="string",
                basic_auth=databricks.JobNewClusterDockerImageBasicAuthArgs(
                    password="string",
                    username="string",
                ),
            ),
            driver_instance_pool_id="string",
            init_scripts=[databricks.JobNewClusterInitScriptArgs(
                abfss=databricks.JobNewClusterInitScriptAbfssArgs(
                    destination="string",
                ),
                file=databricks.JobNewClusterInitScriptFileArgs(
                    destination="string",
                ),
                gcs=databricks.JobNewClusterInitScriptGcsArgs(
                    destination="string",
                ),
                s3=databricks.JobNewClusterInitScriptS3Args(
                    destination="string",
                    canned_acl="string",
                    enable_encryption=False,
                    encryption_type="string",
                    endpoint="string",
                    kms_key="string",
                    region="string",
                ),
                volumes=databricks.JobNewClusterInitScriptVolumesArgs(
                    destination="string",
                ),
                workspace=databricks.JobNewClusterInitScriptWorkspaceArgs(
                    destination="string",
                ),
            )],
            enable_elastic_disk=False,
            apply_policy_default_values=False,
            azure_attributes=databricks.JobNewClusterAzureAttributesArgs(
                availability="string",
                first_on_demand=0,
                log_analytics_info=databricks.JobNewClusterAzureAttributesLogAnalyticsInfoArgs(
                    log_analytics_primary_key="string",
                    log_analytics_workspace_id="string",
                ),
                spot_bid_max_price=0,
            ),
            aws_attributes=databricks.JobNewClusterAwsAttributesArgs(
                availability="string",
                ebs_volume_count=0,
                ebs_volume_iops=0,
                ebs_volume_size=0,
                ebs_volume_throughput=0,
                ebs_volume_type="string",
                first_on_demand=0,
                instance_profile_arn="string",
                spot_bid_price_percent=0,
                zone_id="string",
            ),
            driver_node_type_id="string",
            instance_pool_id="string",
            libraries=[databricks.JobNewClusterLibraryArgs(
                cran=databricks.JobNewClusterLibraryCranArgs(
                    package="string",
                    repo="string",
                ),
                egg="string",
                jar="string",
                maven=databricks.JobNewClusterLibraryMavenArgs(
                    coordinates="string",
                    exclusions=["string"],
                    repo="string",
                ),
                pypi=databricks.JobNewClusterLibraryPypiArgs(
                    package="string",
                    repo="string",
                ),
                requirements="string",
                whl="string",
            )],
            node_type_id="string",
            num_workers=0,
            policy_id="string",
            runtime_engine="string",
            single_user_name="string",
            spark_conf={
                "string": "string",
            },
            spark_env_vars={
                "string": "string",
            },
            autoscale=databricks.JobNewClusterAutoscaleArgs(
                max_workers=0,
                min_workers=0,
            ),
            ssh_public_keys=["string"],
            workload_type=databricks.JobNewClusterWorkloadTypeArgs(
                clients=databricks.JobNewClusterWorkloadTypeClientsArgs(
                    jobs=False,
                    notebooks=False,
                ),
            ),
        ),
        notification_settings=databricks.JobNotificationSettingsArgs(
            no_alert_for_canceled_runs=False,
            no_alert_for_skipped_runs=False,
        ),
        parameters=[databricks.JobParameterArgs(
            default="string",
            name="string",
        )],
        queue=databricks.JobQueueArgs(
            enabled=False,
        ),
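        # run_as sets the identity the job executes under; supply exactly one of
        # service_principal_name or user_name.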
        run_as=databricks.JobRunAsArgs(
            service_principal_name="string",
            user_name="string",
        ),
        schedule=databricks.JobScheduleArgs(
            quartz_cron_expression="string",
            timezone_id="string",
            pause_status="string",
        ),
        tags={
            "string": "string",
        },
        tasks=[databricks.JobTaskArgs(
            task_key="string",
            new_cluster=databricks.JobTaskNewClusterArgs(
                spark_version="string",
                enable_local_disk_encryption=False,
                cluster_log_conf=databricks.JobTaskNewClusterClusterLogConfArgs(
                    dbfs=databricks.JobTaskNewClusterClusterLogConfDbfsArgs(
                        destination="string",
                    ),
                    s3=databricks.JobTaskNewClusterClusterLogConfS3Args(
                        destination="string",
                        canned_acl="string",
                        enable_encryption=False,
                        encryption_type="string",
                        endpoint="string",
                        kms_key="string",
                        region="string",
                    ),
                ),
                gcp_attributes=databricks.JobTaskNewClusterGcpAttributesArgs(
                    availability="string",
                    boot_disk_size=0,
                    google_service_account="string",
                    local_ssd_count=0,
                    use_preemptible_executors=False,
                    zone_id="string",
                ),
                cluster_id="string",
                idempotency_token="string",
                cluster_mount_infos=[databricks.JobTaskNewClusterClusterMountInfoArgs(
                    local_mount_dir_path="string",
                    network_filesystem_info=databricks.JobTaskNewClusterClusterMountInfoNetworkFilesystemInfoArgs(
                        server_address="string",
                        mount_options="string",
                    ),
                    remote_mount_dir_path="string",
                )],
                cluster_name="string",
                custom_tags={
                    "string": "string",
                },
                data_security_mode="string",
                docker_image=databricks.JobTaskNewClusterDockerImageArgs(
                    url="string",
                    basic_auth=databricks.JobTaskNewClusterDockerImageBasicAuthArgs(
                        password="string",
                        username="string",
                    ),
                ),
                driver_instance_pool_id="string",
                init_scripts=[databricks.JobTaskNewClusterInitScriptArgs(
                    abfss=databricks.JobTaskNewClusterInitScriptAbfssArgs(
                        destination="string",
                    ),
                    file=databricks.JobTaskNewClusterInitScriptFileArgs(
                        destination="string",
                    ),
                    gcs=databricks.JobTaskNewClusterInitScriptGcsArgs(
                        destination="string",
                    ),
                    s3=databricks.JobTaskNewClusterInitScriptS3Args(
                        destination="string",
                        canned_acl="string",
                        enable_encryption=False,
                        encryption_type="string",
                        endpoint="string",
                        kms_key="string",
                        region="string",
                    ),
                    volumes=databricks.JobTaskNewClusterInitScriptVolumesArgs(
                        destination="string",
                    ),
                    workspace=databricks.JobTaskNewClusterInitScriptWorkspaceArgs(
                        destination="string",
                    ),
                )],
                enable_elastic_disk=False,
                apply_policy_default_values=False,
                azure_attributes=databricks.JobTaskNewClusterAzureAttributesArgs(
                    availability="string",
                    first_on_demand=0,
                    log_analytics_info=databricks.JobTaskNewClusterAzureAttributesLogAnalyticsInfoArgs(
                        log_analytics_primary_key="string",
                        log_analytics_workspace_id="string",
                    ),
                    spot_bid_max_price=0,
                ),
                aws_attributes=databricks.JobTaskNewClusterAwsAttributesArgs(
                    availability="string",
                    ebs_volume_count=0,
                    ebs_volume_iops=0,
                    ebs_volume_size=0,
                    ebs_volume_throughput=0,
                    ebs_volume_type="string",
                    first_on_demand=0,
                    instance_profile_arn="string",
                    spot_bid_price_percent=0,
                    zone_id="string",
                ),
                driver_node_type_id="string",
                instance_pool_id="string",
                libraries=[databricks.JobTaskNewClusterLibraryArgs(
                    cran=databricks.JobTaskNewClusterLibraryCranArgs(
                        package="string",
                        repo="string",
                    ),
                    egg="string",
                    jar="string",
                    maven=databricks.JobTaskNewClusterLibraryMavenArgs(
                        coordinates="string",
                        exclusions=["string"],
                        repo="string",
                    ),
                    pypi=databricks.JobTaskNewClusterLibraryPypiArgs(
                        package="string",
                        repo="string",
                    ),
                    requirements="string",
                    whl="string",
                )],
                node_type_id="string",
                num_workers=0,
                policy_id="string",
                runtime_engine="string",
                single_user_name="string",
                spark_conf={
                    "string": "string",
                },
                spark_env_vars={
                    "string": "string",
                },
                autoscale=databricks.JobTaskNewClusterAutoscaleArgs(
                    max_workers=0,
                    min_workers=0,
                ),
                ssh_public_keys=["string"],
                workload_type=databricks.JobTaskNewClusterWorkloadTypeArgs(
                    clients=databricks.JobTaskNewClusterWorkloadTypeClientsArgs(
                        jobs=False,
                        notebooks=False,
                    ),
                ),
            ),
            dbt_task=databricks.JobTaskDbtTaskArgs(
                commands=["string"],
                catalog="string",
                profiles_directory="string",
                project_directory="string",
                schema="string",
                source="string",
                warehouse_id="string",
            ),
            description="string",
            disable_auto_optimization=False,
            email_notifications=databricks.JobTaskEmailNotificationsArgs(
                no_alert_for_skipped_runs=False,
                on_duration_warning_threshold_exceededs=["string"],
                on_failures=["string"],
                on_starts=["string"],
                on_streaming_backlog_exceededs=["string"],
                on_successes=["string"],
            ),
            environment_key="string",
            existing_cluster_id="string",
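            # for_each_task runs the nested `task` once per element of `inputs`
            # (a JSON-encoded array, or a reference that resolves to one), with at
            # most `concurrency` iterations in flight at a time.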
            for_each_task=databricks.JobTaskForEachTaskArgs(
                inputs="string",
                task=databricks.JobTaskForEachTaskTaskArgs(
                    task_key="string",
                    notebook_task=databricks.JobTaskForEachTaskTaskNotebookTaskArgs(
                        notebook_path="string",
                        base_parameters={
                            "string": "string",
                        },
                        source="string",
                        warehouse_id="string",
                    ),
                    webhook_notifications=databricks.JobTaskForEachTaskTaskWebhookNotificationsArgs(
                        on_duration_warning_threshold_exceededs=[databricks.JobTaskForEachTaskTaskWebhookNotificationsOnDurationWarningThresholdExceededArgs(
                            id="string",
                        )],
                        on_failures=[databricks.JobTaskForEachTaskTaskWebhookNotificationsOnFailureArgs(
                            id="string",
                        )],
                        on_starts=[databricks.JobTaskForEachTaskTaskWebhookNotificationsOnStartArgs(
                            id="string",
                        )],
                        on_streaming_backlog_exceededs=[databricks.JobTaskForEachTaskTaskWebhookNotificationsOnStreamingBacklogExceededArgs(
                            id="string",
                        )],
                        on_successes=[databricks.JobTaskForEachTaskTaskWebhookNotificationsOnSuccessArgs(
                            id="string",
                        )],
                    ),
                    new_cluster=databricks.JobTaskForEachTaskTaskNewClusterArgs(
                        spark_version="string",
                        enable_local_disk_encryption=False,
                        cluster_log_conf=databricks.JobTaskForEachTaskTaskNewClusterClusterLogConfArgs(
                            dbfs=databricks.JobTaskForEachTaskTaskNewClusterClusterLogConfDbfsArgs(
                                destination="string",
                            ),
                            s3=databricks.JobTaskForEachTaskTaskNewClusterClusterLogConfS3Args(
                                destination="string",
                                canned_acl="string",
                                enable_encryption=False,
                                encryption_type="string",
                                endpoint="string",
                                kms_key="string",
                                region="string",
                            ),
                        ),
                        gcp_attributes=databricks.JobTaskForEachTaskTaskNewClusterGcpAttributesArgs(
                            availability="string",
                            boot_disk_size=0,
                            google_service_account="string",
                            local_ssd_count=0,
                            use_preemptible_executors=False,
                            zone_id="string",
                        ),
                        cluster_id="string",
                        idempotency_token="string",
                        cluster_mount_infos=[databricks.JobTaskForEachTaskTaskNewClusterClusterMountInfoArgs(
                            local_mount_dir_path="string",
                            network_filesystem_info=databricks.JobTaskForEachTaskTaskNewClusterClusterMountInfoNetworkFilesystemInfoArgs(
                                server_address="string",
                                mount_options="string",
                            ),
                            remote_mount_dir_path="string",
                        )],
                        cluster_name="string",
                        custom_tags={
                            "string": "string",
                        },
                        data_security_mode="string",
                        docker_image=databricks.JobTaskForEachTaskTaskNewClusterDockerImageArgs(
                            url="string",
                            basic_auth=databricks.JobTaskForEachTaskTaskNewClusterDockerImageBasicAuthArgs(
                                password="string",
                                username="string",
                            ),
                        ),
                        driver_instance_pool_id="string",
                        init_scripts=[databricks.JobTaskForEachTaskTaskNewClusterInitScriptArgs(
                            abfss=databricks.JobTaskForEachTaskTaskNewClusterInitScriptAbfssArgs(
                                destination="string",
                            ),
                            file=databricks.JobTaskForEachTaskTaskNewClusterInitScriptFileArgs(
                                destination="string",
                            ),
                            gcs=databricks.JobTaskForEachTaskTaskNewClusterInitScriptGcsArgs(
                                destination="string",
                            ),
                            s3=databricks.JobTaskForEachTaskTaskNewClusterInitScriptS3Args(
                                destination="string",
                                canned_acl="string",
                                enable_encryption=False,
                                encryption_type="string",
                                endpoint="string",
                                kms_key="string",
                                region="string",
                            ),
                            volumes=databricks.JobTaskForEachTaskTaskNewClusterInitScriptVolumesArgs(
                                destination="string",
                            ),
                            workspace=databricks.JobTaskForEachTaskTaskNewClusterInitScriptWorkspaceArgs(
                                destination="string",
                            ),
                        )],
                        enable_elastic_disk=False,
                        apply_policy_default_values=False,
                        azure_attributes=databricks.JobTaskForEachTaskTaskNewClusterAzureAttributesArgs(
                            availability="string",
                            first_on_demand=0,
                            log_analytics_info=databricks.JobTaskForEachTaskTaskNewClusterAzureAttributesLogAnalyticsInfoArgs(
                                log_analytics_primary_key="string",
                                log_analytics_workspace_id="string",
                            ),
                            spot_bid_max_price=0,
                        ),
                        aws_attributes=databricks.JobTaskForEachTaskTaskNewClusterAwsAttributesArgs(
                            availability="string",
                            ebs_volume_count=0,
                            ebs_volume_iops=0,
                            ebs_volume_size=0,
                            ebs_volume_throughput=0,
                            ebs_volume_type="string",
                            first_on_demand=0,
                            instance_profile_arn="string",
                            spot_bid_price_percent=0,
                            zone_id="string",
                        ),
                        driver_node_type_id="string",
                        instance_pool_id="string",
                        libraries=[databricks.JobTaskForEachTaskTaskNewClusterLibraryArgs(
                            cran=databricks.JobTaskForEachTaskTaskNewClusterLibraryCranArgs(
                                package="string",
                                repo="string",
                            ),
                            egg="string",
                            jar="string",
                            maven=databricks.JobTaskForEachTaskTaskNewClusterLibraryMavenArgs(
                                coordinates="string",
                                exclusions=["string"],
                                repo="string",
                            ),
                            pypi=databricks.JobTaskForEachTaskTaskNewClusterLibraryPypiArgs(
                                package="string",
                                repo="string",
                            ),
                            requirements="string",
                            whl="string",
                        )],
                        node_type_id="string",
                        num_workers=0,
                        policy_id="string",
                        runtime_engine="string",
                        single_user_name="string",
                        spark_conf={
                            "string": "string",
                        },
                        spark_env_vars={
                            "string": "string",
                        },
                        autoscale=databricks.JobTaskForEachTaskTaskNewClusterAutoscaleArgs(
                            max_workers=0,
                            min_workers=0,
                        ),
                        ssh_public_keys=["string"],
                        workload_type=databricks.JobTaskForEachTaskTaskNewClusterWorkloadTypeArgs(
                            clients=databricks.JobTaskForEachTaskTaskNewClusterWorkloadTypeClientsArgs(
                                jobs=False,
                                notebooks=False,
                            ),
                        ),
                    ),
                    disable_auto_optimization=False,
                    email_notifications=databricks.JobTaskForEachTaskTaskEmailNotificationsArgs(
                        no_alert_for_skipped_runs=False,
                        on_duration_warning_threshold_exceededs=["string"],
                        on_failures=["string"],
                        on_starts=["string"],
                        on_streaming_backlog_exceededs=["string"],
                        on_successes=["string"],
                    ),
                    environment_key="string",
                    existing_cluster_id="string",
                    health=databricks.JobTaskForEachTaskTaskHealthArgs(
                        rules=[databricks.JobTaskForEachTaskTaskHealthRuleArgs(
                            metric="string",
                            op="string",
                            value=0,
                        )],
                    ),
                    job_cluster_key="string",
                    libraries=[databricks.JobTaskForEachTaskTaskLibraryArgs(
                        cran=databricks.JobTaskForEachTaskTaskLibraryCranArgs(
                            package="string",
                            repo="string",
                        ),
                        egg="string",
                        jar="string",
                        maven=databricks.JobTaskForEachTaskTaskLibraryMavenArgs(
                            coordinates="string",
                            exclusions=["string"],
                            repo="string",
                        ),
                        pypi=databricks.JobTaskForEachTaskTaskLibraryPypiArgs(
                            package="string",
                            repo="string",
                        ),
                        requirements="string",
                        whl="string",
                    )],
                    max_retries=0,
                    min_retry_interval_millis=0,
                    description="string",
                    depends_ons=[databricks.JobTaskForEachTaskTaskDependsOnArgs(
                        task_key="string",
                        outcome="string",
                    )],
                    spark_python_task=databricks.JobTaskForEachTaskTaskSparkPythonTaskArgs(
                        python_file="string",
                        parameters=["string"],
                        source="string",
                    ),
                    pipeline_task=databricks.JobTaskForEachTaskTaskPipelineTaskArgs(
                        pipeline_id="string",
                        full_refresh=False,
                    ),
                    python_wheel_task=databricks.JobTaskForEachTaskTaskPythonWheelTaskArgs(
                        entry_point="string",
                        named_parameters={
                            "string": "string",
                        },
                        package_name="string",
                        parameters=["string"],
                    ),
                    retry_on_timeout=False,
                    run_if="string",
                    run_job_task=databricks.JobTaskForEachTaskTaskRunJobTaskArgs(
                        job_id=0,
                        dbt_commands=["string"],
                        jar_params=["string"],
                        job_parameters={
                            "string": "string",
                        },
                        notebook_params={
                            "string": "string",
                        },
                        pipeline_params=databricks.JobTaskForEachTaskTaskRunJobTaskPipelineParamsArgs(
                            full_refresh=False,
                        ),
                        python_named_params={
                            "string": "string",
                        },
                        python_params=["string"],
                        spark_submit_params=["string"],
                        sql_params={
                            "string": "string",
                        },
                    ),
                    spark_jar_task=databricks.JobTaskForEachTaskTaskSparkJarTaskArgs(
                        jar_uri="string",
                        main_class_name="string",
                        parameters=["string"],
                    ),
                    notification_settings=databricks.JobTaskForEachTaskTaskNotificationSettingsArgs(
                        alert_on_last_attempt=False,
                        no_alert_for_canceled_runs=False,
                        no_alert_for_skipped_runs=False,
                    ),
                    spark_submit_task=databricks.JobTaskForEachTaskTaskSparkSubmitTaskArgs(
                        parameters=["string"],
                    ),
                    sql_task=databricks.JobTaskForEachTaskTaskSqlTaskArgs(
                        warehouse_id="string",
                        alert=databricks.JobTaskForEachTaskTaskSqlTaskAlertArgs(
                            alert_id="string",
                            subscriptions=[databricks.JobTaskForEachTaskTaskSqlTaskAlertSubscriptionArgs(
                                destination_id="string",
                                user_name="string",
                            )],
                            pause_subscriptions=False,
                        ),
                        dashboard=databricks.JobTaskForEachTaskTaskSqlTaskDashboardArgs(
                            dashboard_id="string",
                            custom_subject="string",
                            pause_subscriptions=False,
                            subscriptions=[databricks.JobTaskForEachTaskTaskSqlTaskDashboardSubscriptionArgs(
                                destination_id="string",
                                user_name="string",
                            )],
                        ),
                        file=databricks.JobTaskForEachTaskTaskSqlTaskFileArgs(
                            path="string",
                            source="string",
                        ),
                        parameters={
                            "string": "string",
                        },
                        query=databricks.JobTaskForEachTaskTaskSqlTaskQueryArgs(
                            query_id="string",
                        ),
                    ),
                    dbt_task=databricks.JobTaskForEachTaskTaskDbtTaskArgs(
                        commands=["string"],
                        catalog="string",
                        profiles_directory="string",
                        project_directory="string",
                        schema="string",
                        source="string",
                        warehouse_id="string",
                    ),
                    timeout_seconds=0,
                    condition_task=databricks.JobTaskForEachTaskTaskConditionTaskArgs(
                        left="string",
                        op="string",
                        right="string",
                    ),
                ),
                concurrency=0,
            ),
            health=databricks.JobTaskHealthArgs(
                rules=[databricks.JobTaskHealthRuleArgs(
                    metric="string",
                    op="string",
                    value=0,
                )],
            ),
            job_cluster_key="string",
            libraries=[databricks.JobTaskLibraryArgs(
                cran=databricks.JobTaskLibraryCranArgs(
                    package="string",
                    repo="string",
                ),
                egg="string",
                jar="string",
                maven=databricks.JobTaskLibraryMavenArgs(
                    coordinates="string",
                    exclusions=["string"],
                    repo="string",
                ),
                pypi=databricks.JobTaskLibraryPypiArgs(
                    package="string",
                    repo="string",
                ),
                requirements="string",
                whl="string",
            )],
            max_retries=0,
            webhook_notifications=databricks.JobTaskWebhookNotificationsArgs(
                on_duration_warning_threshold_exceededs=[databricks.JobTaskWebhookNotificationsOnDurationWarningThresholdExceededArgs(
                    id="string",
                )],
                on_failures=[databricks.JobTaskWebhookNotificationsOnFailureArgs(
                    id="string",
                )],
                on_starts=[databricks.JobTaskWebhookNotificationsOnStartArgs(
                    id="string",
                )],
                on_streaming_backlog_exceededs=[databricks.JobTaskWebhookNotificationsOnStreamingBacklogExceededArgs(
                    id="string",
                )],
                on_successes=[databricks.JobTaskWebhookNotificationsOnSuccessArgs(
                    id="string",
                )],
            ),
            depends_ons=[databricks.JobTaskDependsOnArgs(
                task_key="string",
                outcome="string",
            )],
            retry_on_timeout=False,
            notification_settings=databricks.JobTaskNotificationSettingsArgs(
                alert_on_last_attempt=False,
                no_alert_for_canceled_runs=False,
                no_alert_for_skipped_runs=False,
            ),
            pipeline_task=databricks.JobTaskPipelineTaskArgs(
                pipeline_id="string",
                full_refresh=False,
            ),
            python_wheel_task=databricks.JobTaskPythonWheelTaskArgs(
                entry_point="string",
                named_parameters={
                    "string": "string",
                },
                package_name="string",
                parameters=["string"],
            ),
            notebook_task=databricks.JobTaskNotebookTaskArgs(
                notebook_path="string",
                base_parameters={
                    "string": "string",
                },
                source="string",
                warehouse_id="string",
            ),
            run_if="string",
            run_job_task=databricks.JobTaskRunJobTaskArgs(
                job_id=0,
                dbt_commands=["string"],
                jar_params=["string"],
                job_parameters={
                    "string": "string",
                },
                notebook_params={
                    "string": "string",
                },
                pipeline_params=databricks.JobTaskRunJobTaskPipelineParamsArgs(
                    full_refresh=False,
                ),
                python_named_params={
                    "string": "string",
                },
                python_params=["string"],
                spark_submit_params=["string"],
                sql_params={
                    "string": "string",
                },
            ),
            spark_jar_task=databricks.JobTaskSparkJarTaskArgs(
                jar_uri="string",
                main_class_name="string",
                parameters=["string"],
            ),
            spark_python_task=databricks.JobTaskSparkPythonTaskArgs(
                python_file="string",
                parameters=["string"],
                source="string",
            ),
            spark_submit_task=databricks.JobTaskSparkSubmitTaskArgs(
                parameters=["string"],
            ),
            sql_task=databricks.JobTaskSqlTaskArgs(
                warehouse_id="string",
                alert=databricks.JobTaskSqlTaskAlertArgs(
                    alert_id="string",
                    subscriptions=[databricks.JobTaskSqlTaskAlertSubscriptionArgs(
                        destination_id="string",
                        user_name="string",
                    )],
                    pause_subscriptions=False,
                ),
                dashboard=databricks.JobTaskSqlTaskDashboardArgs(
                    dashboard_id="string",
                    custom_subject="string",
                    pause_subscriptions=False,
                    subscriptions=[databricks.JobTaskSqlTaskDashboardSubscriptionArgs(
                        destination_id="string",
                        user_name="string",
                    )],
                ),
                file=databricks.JobTaskSqlTaskFileArgs(
                    path="string",
                    source="string",
                ),
                parameters={
                    "string": "string",
                },
                query=databricks.JobTaskSqlTaskQueryArgs(
                    query_id="string",
                ),
            ),
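            # condition_task compares left and right using op; supported operators
            # include EQUAL_TO, NOT_EQUAL, GREATER_THAN, GREATER_THAN_OR_EQUAL,
            # LESS_THAN, and LESS_THAN_OR_EQUAL.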
            condition_task=databricks.JobTaskConditionTaskArgs(
                left="string",
                op="string",
                right="string",
            ),
            timeout_seconds=0,
            min_retry_interval_millis=0,
        )],
        timeout_seconds=0,
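        # A trigger starts runs from events rather than a cron schedule; configure
        # one of the file_arrival, periodic, table, or table_update blocks.
        # pause_status is either "PAUSED" or "UNPAUSED".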
        trigger=databricks.JobTriggerArgs(
            file_arrival=databricks.JobTriggerFileArrivalArgs(
                url="string",
                min_time_between_triggers_seconds=0,
                wait_after_last_change_seconds=0,
            ),
            pause_status="string",
            periodic=databricks.JobTriggerPeriodicArgs(
                interval=0,
                unit="string",
            ),
            table=databricks.JobTriggerTableArgs(
                condition="string",
                min_time_between_triggers_seconds=0,
                table_names=["string"],
                wait_after_last_change_seconds=0,
            ),
            table_update=databricks.JobTriggerTableUpdateArgs(
                table_names=["string"],
                condition="string",
                min_time_between_triggers_seconds=0,
                wait_after_last_change_seconds=0,
            ),
        ),
        webhook_notifications=databricks.JobWebhookNotificationsArgs(
            on_duration_warning_threshold_exceededs=[databricks.JobWebhookNotificationsOnDurationWarningThresholdExceededArgs(
                id="string",
            )],
            on_failures=[databricks.JobWebhookNotificationsOnFailureArgs(
                id="string",
            )],
            on_starts=[databricks.JobWebhookNotificationsOnStartArgs(
                id="string",
            )],
            on_streaming_backlog_exceededs=[databricks.JobWebhookNotificationsOnStreamingBacklogExceededArgs(
                id="string",
            )],
            on_successes=[databricks.JobWebhookNotificationsOnSuccessArgs(
                id="string",
            )],
        ))
    
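    Most real jobs use only a small subset of the arguments listed above. The following sketch, in the same Python args style, defines one notebook task on an ephemeral cluster, scheduled nightly, with a failure alert. The Spark version, node type, notebook path, and notification address are illustrative assumptions, not values taken from this page.

    import pulumi
    import pulumi_databricks as databricks

    nightly = databricks.Job("nightly",
        name="Nightly ETL",
        tasks=[databricks.JobTaskArgs(
            task_key="etl",
            # Assumed runtime and node type; substitute values valid in your workspace.
            new_cluster=databricks.JobTaskNewClusterArgs(
                num_workers=1,
                spark_version="14.3.x-scala2.12",
                node_type_id="i3.xlarge",
            ),
            notebook_task=databricks.JobTaskNotebookTaskArgs(
                notebook_path="/Shared/etl",  # hypothetical workspace path
            ),
            max_retries=1,
            timeout_seconds=3600,
        )],
        # Quartz cron expression: every day at 02:00 UTC.
        schedule=databricks.JobScheduleArgs(
            quartz_cron_expression="0 0 2 * * ?",
            timezone_id="UTC",
        ),
        email_notifications=databricks.JobEmailNotificationsArgs(
            on_failures=["data-team@example.com"],
        ),
    )

    The equivalent full constructor syntax in TypeScript: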
    const jobResource = new databricks.Job("jobResource", {
        continuous: {
            pauseStatus: "string",
        },
        controlRunState: false,
        deployment: {
            kind: "string",
            metadataFilePath: "string",
        },
        description: "string",
        editMode: "string",
        emailNotifications: {
            noAlertForSkippedRuns: false,
            onDurationWarningThresholdExceededs: ["string"],
            onFailures: ["string"],
            onStarts: ["string"],
            onStreamingBacklogExceededs: ["string"],
            onSuccesses: ["string"],
        },
        environments: [{
            environmentKey: "string",
            spec: {
                client: "string",
                dependencies: ["string"],
            },
        }],
        existingClusterId: "string",
        format: "string",
        gitSource: {
            url: "string",
            branch: "string",
            commit: "string",
            gitSnapshot: {
                usedCommit: "string",
            },
            jobSource: {
                importFromGitBranch: "string",
                jobConfigPath: "string",
                dirtyState: "string",
            },
            provider: "string",
            tag: "string",
        },
        health: {
            rules: [{
                metric: "string",
                op: "string",
                value: 0,
            }],
        },
        jobClusters: [{
            jobClusterKey: "string",
            newCluster: {
                sparkVersion: "string",
                enableLocalDiskEncryption: false,
                clusterLogConf: {
                    dbfs: {
                        destination: "string",
                    },
                    s3: {
                        destination: "string",
                        cannedAcl: "string",
                        enableEncryption: false,
                        encryptionType: "string",
                        endpoint: "string",
                        kmsKey: "string",
                        region: "string",
                    },
                },
                gcpAttributes: {
                    availability: "string",
                    bootDiskSize: 0,
                    googleServiceAccount: "string",
                    localSsdCount: 0,
                    usePreemptibleExecutors: false,
                    zoneId: "string",
                },
                clusterId: "string",
                idempotencyToken: "string",
                clusterMountInfos: [{
                    localMountDirPath: "string",
                    networkFilesystemInfo: {
                        serverAddress: "string",
                        mountOptions: "string",
                    },
                    remoteMountDirPath: "string",
                }],
                clusterName: "string",
                customTags: {
                    string: "string",
                },
                dataSecurityMode: "string",
                dockerImage: {
                    url: "string",
                    basicAuth: {
                        password: "string",
                        username: "string",
                    },
                },
                driverInstancePoolId: "string",
                initScripts: [{
                    abfss: {
                        destination: "string",
                    },
                    file: {
                        destination: "string",
                    },
                    gcs: {
                        destination: "string",
                    },
                    s3: {
                        destination: "string",
                        cannedAcl: "string",
                        enableEncryption: false,
                        encryptionType: "string",
                        endpoint: "string",
                        kmsKey: "string",
                        region: "string",
                    },
                    volumes: {
                        destination: "string",
                    },
                    workspace: {
                        destination: "string",
                    },
                }],
                enableElasticDisk: false,
                applyPolicyDefaultValues: false,
                azureAttributes: {
                    availability: "string",
                    firstOnDemand: 0,
                    logAnalyticsInfo: {
                        logAnalyticsPrimaryKey: "string",
                        logAnalyticsWorkspaceId: "string",
                    },
                    spotBidMaxPrice: 0,
                },
                awsAttributes: {
                    availability: "string",
                    ebsVolumeCount: 0,
                    ebsVolumeIops: 0,
                    ebsVolumeSize: 0,
                    ebsVolumeThroughput: 0,
                    ebsVolumeType: "string",
                    firstOnDemand: 0,
                    instanceProfileArn: "string",
                    spotBidPricePercent: 0,
                    zoneId: "string",
                },
                driverNodeTypeId: "string",
                instancePoolId: "string",
                libraries: [{
                    cran: {
                        "package": "string",
                        repo: "string",
                    },
                    egg: "string",
                    jar: "string",
                    maven: {
                        coordinates: "string",
                        exclusions: ["string"],
                        repo: "string",
                    },
                    pypi: {
                        "package": "string",
                        repo: "string",
                    },
                    requirements: "string",
                    whl: "string",
                }],
                nodeTypeId: "string",
                numWorkers: 0,
                policyId: "string",
                runtimeEngine: "string",
                singleUserName: "string",
                sparkConf: {
                    string: "string",
                },
                sparkEnvVars: {
                    string: "string",
                },
                autoscale: {
                    maxWorkers: 0,
                    minWorkers: 0,
                },
                sshPublicKeys: ["string"],
                workloadType: {
                    clients: {
                        jobs: false,
                        notebooks: false,
                    },
                },
            },
        }],
        libraries: [{
            cran: {
                "package": "string",
                repo: "string",
            },
            egg: "string",
            jar: "string",
            maven: {
                coordinates: "string",
                exclusions: ["string"],
                repo: "string",
            },
            pypi: {
                "package": "string",
                repo: "string",
            },
            requirements: "string",
            whl: "string",
        }],
        maxConcurrentRuns: 0,
        name: "string",
        newCluster: {
            sparkVersion: "string",
            enableLocalDiskEncryption: false,
            clusterLogConf: {
                dbfs: {
                    destination: "string",
                },
                s3: {
                    destination: "string",
                    cannedAcl: "string",
                    enableEncryption: false,
                    encryptionType: "string",
                    endpoint: "string",
                    kmsKey: "string",
                    region: "string",
                },
            },
            gcpAttributes: {
                availability: "string",
                bootDiskSize: 0,
                googleServiceAccount: "string",
                localSsdCount: 0,
                usePreemptibleExecutors: false,
                zoneId: "string",
            },
            clusterId: "string",
            idempotencyToken: "string",
            clusterMountInfos: [{
                localMountDirPath: "string",
                networkFilesystemInfo: {
                    serverAddress: "string",
                    mountOptions: "string",
                },
                remoteMountDirPath: "string",
            }],
            clusterName: "string",
            customTags: {
                string: "string",
            },
            dataSecurityMode: "string",
            dockerImage: {
                url: "string",
                basicAuth: {
                    password: "string",
                    username: "string",
                },
            },
            driverInstancePoolId: "string",
            initScripts: [{
                abfss: {
                    destination: "string",
                },
                file: {
                    destination: "string",
                },
                gcs: {
                    destination: "string",
                },
                s3: {
                    destination: "string",
                    cannedAcl: "string",
                    enableEncryption: false,
                    encryptionType: "string",
                    endpoint: "string",
                    kmsKey: "string",
                    region: "string",
                },
                volumes: {
                    destination: "string",
                },
                workspace: {
                    destination: "string",
                },
            }],
            enableElasticDisk: false,
            applyPolicyDefaultValues: false,
            azureAttributes: {
                availability: "string",
                firstOnDemand: 0,
                logAnalyticsInfo: {
                    logAnalyticsPrimaryKey: "string",
                    logAnalyticsWorkspaceId: "string",
                },
                spotBidMaxPrice: 0,
            },
            awsAttributes: {
                availability: "string",
                ebsVolumeCount: 0,
                ebsVolumeIops: 0,
                ebsVolumeSize: 0,
                ebsVolumeThroughput: 0,
                ebsVolumeType: "string",
                firstOnDemand: 0,
                instanceProfileArn: "string",
                spotBidPricePercent: 0,
                zoneId: "string",
            },
            driverNodeTypeId: "string",
            instancePoolId: "string",
            libraries: [{
                cran: {
                    "package": "string",
                    repo: "string",
                },
                egg: "string",
                jar: "string",
                maven: {
                    coordinates: "string",
                    exclusions: ["string"],
                    repo: "string",
                },
                pypi: {
                    "package": "string",
                    repo: "string",
                },
                requirements: "string",
                whl: "string",
            }],
            nodeTypeId: "string",
            numWorkers: 0,
            policyId: "string",
            runtimeEngine: "string",
            singleUserName: "string",
            sparkConf: {
                string: "string",
            },
            sparkEnvVars: {
                string: "string",
            },
            autoscale: {
                maxWorkers: 0,
                minWorkers: 0,
            },
            sshPublicKeys: ["string"],
            workloadType: {
                clients: {
                    jobs: false,
                    notebooks: false,
                },
            },
        },
        notificationSettings: {
            noAlertForCanceledRuns: false,
            noAlertForSkippedRuns: false,
        },
        parameters: [{
            "default": "string",
            name: "string",
        }],
        queue: {
            enabled: false,
        },
        runAs: {
            servicePrincipalName: "string",
            userName: "string",
        },
        schedule: {
            quartzCronExpression: "string",
            timezoneId: "string",
            pauseStatus: "string",
        },
        tags: {
            string: "string",
        },
        tasks: [{
            taskKey: "string",
            newCluster: {
                sparkVersion: "string",
                enableLocalDiskEncryption: false,
                clusterLogConf: {
                    dbfs: {
                        destination: "string",
                    },
                    s3: {
                        destination: "string",
                        cannedAcl: "string",
                        enableEncryption: false,
                        encryptionType: "string",
                        endpoint: "string",
                        kmsKey: "string",
                        region: "string",
                    },
                },
                gcpAttributes: {
                    availability: "string",
                    bootDiskSize: 0,
                    googleServiceAccount: "string",
                    localSsdCount: 0,
                    usePreemptibleExecutors: false,
                    zoneId: "string",
                },
                clusterId: "string",
                idempotencyToken: "string",
                clusterMountInfos: [{
                    localMountDirPath: "string",
                    networkFilesystemInfo: {
                        serverAddress: "string",
                        mountOptions: "string",
                    },
                    remoteMountDirPath: "string",
                }],
                clusterName: "string",
                customTags: {
                    string: "string",
                },
                dataSecurityMode: "string",
                dockerImage: {
                    url: "string",
                    basicAuth: {
                        password: "string",
                        username: "string",
                    },
                },
                driverInstancePoolId: "string",
                initScripts: [{
                    abfss: {
                        destination: "string",
                    },
                    file: {
                        destination: "string",
                    },
                    gcs: {
                        destination: "string",
                    },
                    s3: {
                        destination: "string",
                        cannedAcl: "string",
                        enableEncryption: false,
                        encryptionType: "string",
                        endpoint: "string",
                        kmsKey: "string",
                        region: "string",
                    },
                    volumes: {
                        destination: "string",
                    },
                    workspace: {
                        destination: "string",
                    },
                }],
                enableElasticDisk: false,
                applyPolicyDefaultValues: false,
                azureAttributes: {
                    availability: "string",
                    firstOnDemand: 0,
                    logAnalyticsInfo: {
                        logAnalyticsPrimaryKey: "string",
                        logAnalyticsWorkspaceId: "string",
                    },
                    spotBidMaxPrice: 0,
                },
                awsAttributes: {
                    availability: "string",
                    ebsVolumeCount: 0,
                    ebsVolumeIops: 0,
                    ebsVolumeSize: 0,
                    ebsVolumeThroughput: 0,
                    ebsVolumeType: "string",
                    firstOnDemand: 0,
                    instanceProfileArn: "string",
                    spotBidPricePercent: 0,
                    zoneId: "string",
                },
                driverNodeTypeId: "string",
                instancePoolId: "string",
                libraries: [{
                    cran: {
                        "package": "string",
                        repo: "string",
                    },
                    egg: "string",
                    jar: "string",
                    maven: {
                        coordinates: "string",
                        exclusions: ["string"],
                        repo: "string",
                    },
                    pypi: {
                        "package": "string",
                        repo: "string",
                    },
                    requirements: "string",
                    whl: "string",
                }],
                nodeTypeId: "string",
                numWorkers: 0,
                policyId: "string",
                runtimeEngine: "string",
                singleUserName: "string",
                sparkConf: {
                    string: "string",
                },
                sparkEnvVars: {
                    string: "string",
                },