1. Packages
  2. Databricks Provider
  3. API Docs
  4. Job
Databricks v1.59.0 published on Wednesday, Jan 29, 2025 by Pulumi

databricks.Job

Explore with Pulumi AI

databricks logo
Databricks v1.59.0 published on Wednesday, Jan 29, 2025 by Pulumi

    The databricks.Job resource allows you to manage Databricks Jobs to run non-interactive code in a databricks_cluster.

    Example Usage

    In Pulumi configuration, it is recommended to define tasks in alphabetical order of their task_key arguments, so that you get a consistent and readable diff. Whenever tasks are added or removed, or task_key is renamed, you’ll observe a change in the majority of tasks. It’s related to the fact that the current version of the provider treats task blocks as an ordered list. Alternatively, task block could have been an unordered set, though end-users would see the entire block replaced upon a change in a single property of the task.

    It is possible to create a Databricks job using task blocks. A single task is defined with the task block containing one of the *_task blocks, task_key, and additional arguments described below.

    import * as pulumi from "@pulumi/pulumi";
    import * as databricks from "@pulumi/databricks";
    
    const _this = new databricks.Job("this", {
        name: "Job with multiple tasks",
        description: "This job executes multiple tasks on a shared job cluster, which will be provisioned as part of execution, and terminated once all tasks are finished.",
        jobClusters: [{
            jobClusterKey: "j",
            newCluster: {
                numWorkers: 2,
                sparkVersion: latest.id,
                nodeTypeId: smallest.id,
            },
        }],
        tasks: [
            {
                taskKey: "a",
                newCluster: {
                    numWorkers: 1,
                    sparkVersion: latest.id,
                    nodeTypeId: smallest.id,
                },
                notebookTask: {
                    notebookPath: thisDatabricksNotebook.path,
                },
            },
            {
                taskKey: "b",
                dependsOns: [{
                    taskKey: "a",
                }],
                existingClusterId: shared.id,
                sparkJarTask: {
                    mainClassName: "com.acme.data.Main",
                },
            },
            {
                taskKey: "c",
                jobClusterKey: "j",
                notebookTask: {
                    notebookPath: thisDatabricksNotebook.path,
                },
            },
            {
                taskKey: "d",
                pipelineTask: {
                    pipelineId: thisDatabricksPipeline.id,
                },
            },
        ],
    });
    
    import pulumi
    import pulumi_databricks as databricks
    
    this = databricks.Job("this",
        name="Job with multiple tasks",
        description="This job executes multiple tasks on a shared job cluster, which will be provisioned as part of execution, and terminated once all tasks are finished.",
        job_clusters=[{
            "job_cluster_key": "j",
            "new_cluster": {
                "num_workers": 2,
                "spark_version": latest["id"],
                "node_type_id": smallest["id"],
            },
        }],
        tasks=[
            {
                "task_key": "a",
                "new_cluster": {
                    "num_workers": 1,
                    "spark_version": latest["id"],
                    "node_type_id": smallest["id"],
                },
                "notebook_task": {
                    "notebook_path": this_databricks_notebook["path"],
                },
            },
            {
                "task_key": "b",
                "depends_ons": [{
                    "task_key": "a",
                }],
                "existing_cluster_id": shared["id"],
                "spark_jar_task": {
                    "main_class_name": "com.acme.data.Main",
                },
            },
            {
                "task_key": "c",
                "job_cluster_key": "j",
                "notebook_task": {
                    "notebook_path": this_databricks_notebook["path"],
                },
            },
            {
                "task_key": "d",
                "pipeline_task": {
                    "pipeline_id": this_databricks_pipeline["id"],
                },
            },
        ])
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		_, err := databricks.NewJob(ctx, "this", &databricks.JobArgs{
    			Name:        pulumi.String("Job with multiple tasks"),
    			Description: pulumi.String("This job executes multiple tasks on a shared job cluster, which will be provisioned as part of execution, and terminated once all tasks are finished."),
    			JobClusters: databricks.JobJobClusterArray{
    				&databricks.JobJobClusterArgs{
    					JobClusterKey: pulumi.String("j"),
    					NewCluster: &databricks.JobJobClusterNewClusterArgs{
    						NumWorkers:   pulumi.Int(2),
    						SparkVersion: pulumi.Any(latest.Id),
    						NodeTypeId:   pulumi.Any(smallest.Id),
    					},
    				},
    			},
    			Tasks: databricks.JobTaskArray{
    				&databricks.JobTaskArgs{
    					TaskKey: pulumi.String("a"),
    					NewCluster: &databricks.JobTaskNewClusterArgs{
    						NumWorkers:   pulumi.Int(1),
    						SparkVersion: pulumi.Any(latest.Id),
    						NodeTypeId:   pulumi.Any(smallest.Id),
    					},
    					NotebookTask: &databricks.JobTaskNotebookTaskArgs{
    						NotebookPath: pulumi.Any(thisDatabricksNotebook.Path),
    					},
    				},
    				&databricks.JobTaskArgs{
    					TaskKey: pulumi.String("b"),
    					DependsOns: databricks.JobTaskDependsOnArray{
    						&databricks.JobTaskDependsOnArgs{
    							TaskKey: pulumi.String("a"),
    						},
    					},
    					ExistingClusterId: pulumi.Any(shared.Id),
    					SparkJarTask: &databricks.JobTaskSparkJarTaskArgs{
    						MainClassName: pulumi.String("com.acme.data.Main"),
    					},
    				},
    				&databricks.JobTaskArgs{
    					TaskKey:       pulumi.String("c"),
    					JobClusterKey: pulumi.String("j"),
    					NotebookTask: &databricks.JobTaskNotebookTaskArgs{
    						NotebookPath: pulumi.Any(thisDatabricksNotebook.Path),
    					},
    				},
    				&databricks.JobTaskArgs{
    					TaskKey: pulumi.String("d"),
    					PipelineTask: &databricks.JobTaskPipelineTaskArgs{
    						PipelineId: pulumi.Any(thisDatabricksPipeline.Id),
    					},
    				},
    			},
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Databricks = Pulumi.Databricks;
    
    return await Deployment.RunAsync(() => 
    {
        var @this = new Databricks.Job("this", new()
        {
            Name = "Job with multiple tasks",
            Description = "This job executes multiple tasks on a shared job cluster, which will be provisioned as part of execution, and terminated once all tasks are finished.",
            JobClusters = new[]
            {
                new Databricks.Inputs.JobJobClusterArgs
                {
                    JobClusterKey = "j",
                    NewCluster = new Databricks.Inputs.JobJobClusterNewClusterArgs
                    {
                        NumWorkers = 2,
                        SparkVersion = latest.Id,
                        NodeTypeId = smallest.Id,
                    },
                },
            },
            Tasks = new[]
            {
                new Databricks.Inputs.JobTaskArgs
                {
                    TaskKey = "a",
                    NewCluster = new Databricks.Inputs.JobTaskNewClusterArgs
                    {
                        NumWorkers = 1,
                        SparkVersion = latest.Id,
                        NodeTypeId = smallest.Id,
                    },
                    NotebookTask = new Databricks.Inputs.JobTaskNotebookTaskArgs
                    {
                        NotebookPath = thisDatabricksNotebook.Path,
                    },
                },
                new Databricks.Inputs.JobTaskArgs
                {
                    TaskKey = "b",
                    DependsOns = new[]
                    {
                        new Databricks.Inputs.JobTaskDependsOnArgs
                        {
                            TaskKey = "a",
                        },
                    },
                    ExistingClusterId = shared.Id,
                    SparkJarTask = new Databricks.Inputs.JobTaskSparkJarTaskArgs
                    {
                        MainClassName = "com.acme.data.Main",
                    },
                },
                new Databricks.Inputs.JobTaskArgs
                {
                    TaskKey = "c",
                    JobClusterKey = "j",
                    NotebookTask = new Databricks.Inputs.JobTaskNotebookTaskArgs
                    {
                        NotebookPath = thisDatabricksNotebook.Path,
                    },
                },
                new Databricks.Inputs.JobTaskArgs
                {
                    TaskKey = "d",
                    PipelineTask = new Databricks.Inputs.JobTaskPipelineTaskArgs
                    {
                        PipelineId = thisDatabricksPipeline.Id,
                    },
                },
            },
        });
    
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.databricks.Job;
    import com.pulumi.databricks.JobArgs;
    import com.pulumi.databricks.inputs.JobJobClusterArgs;
    import com.pulumi.databricks.inputs.JobJobClusterNewClusterArgs;
    import com.pulumi.databricks.inputs.JobTaskArgs;
    import com.pulumi.databricks.inputs.JobTaskNewClusterArgs;
    import com.pulumi.databricks.inputs.JobTaskNotebookTaskArgs;
    import com.pulumi.databricks.inputs.JobTaskDependsOnArgs;
    import com.pulumi.databricks.inputs.JobTaskSparkJarTaskArgs;
    import com.pulumi.databricks.inputs.JobTaskPipelineTaskArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var this_ = new Job("this", JobArgs.builder()
                .name("Job with multiple tasks")
                .description("This job executes multiple tasks on a shared job cluster, which will be provisioned as part of execution, and terminated once all tasks are finished.")
                .jobClusters(JobJobClusterArgs.builder()
                    .jobClusterKey("j")
                    .newCluster(JobJobClusterNewClusterArgs.builder()
                        .numWorkers(2)
                        .sparkVersion(latest.id())
                        .nodeTypeId(smallest.id())
                        .build())
                    .build())
                .tasks(            
                    JobTaskArgs.builder()
                        .taskKey("a")
                        .newCluster(JobTaskNewClusterArgs.builder()
                            .numWorkers(1)
                            .sparkVersion(latest.id())
                            .nodeTypeId(smallest.id())
                            .build())
                        .notebookTask(JobTaskNotebookTaskArgs.builder()
                            .notebookPath(thisDatabricksNotebook.path())
                            .build())
                        .build(),
                    JobTaskArgs.builder()
                        .taskKey("b")
                        .dependsOns(JobTaskDependsOnArgs.builder()
                            .taskKey("a")
                            .build())
                        .existingClusterId(shared.id())
                        .sparkJarTask(JobTaskSparkJarTaskArgs.builder()
                            .mainClassName("com.acme.data.Main")
                            .build())
                        .build(),
                    JobTaskArgs.builder()
                        .taskKey("c")
                        .jobClusterKey("j")
                        .notebookTask(JobTaskNotebookTaskArgs.builder()
                            .notebookPath(thisDatabricksNotebook.path())
                            .build())
                        .build(),
                    JobTaskArgs.builder()
                        .taskKey("d")
                        .pipelineTask(JobTaskPipelineTaskArgs.builder()
                            .pipelineId(thisDatabricksPipeline.id())
                            .build())
                        .build())
                .build());
    
        }
    }
    
    resources:
      this:
        type: databricks:Job
        properties:
          name: Job with multiple tasks
          description: This job executes multiple tasks on a shared job cluster, which will be provisioned as part of execution, and terminated once all tasks are finished.
          jobClusters:
            - jobClusterKey: j
              newCluster:
                numWorkers: 2
                sparkVersion: ${latest.id}
                nodeTypeId: ${smallest.id}
          tasks:
            - taskKey: a
              newCluster:
                numWorkers: 1
                sparkVersion: ${latest.id}
                nodeTypeId: ${smallest.id}
              notebookTask:
                notebookPath: ${thisDatabricksNotebook.path}
            - taskKey: b
              dependsOns:
                - taskKey: a
              existingClusterId: ${shared.id}
              sparkJarTask:
                mainClassName: com.acme.data.Main
            - taskKey: c
              jobClusterKey: j
              notebookTask:
                notebookPath: ${thisDatabricksNotebook.path}
            - taskKey: d
              pipelineTask:
                pipelineId: ${thisDatabricksPipeline.id}
    

    Access Control

    By default, all users can create and modify jobs unless an administrator enables jobs access control. With jobs access control, individual permissions determine a user’s abilities.

    • databricks.Permissions can control which groups or individual users can Can View, Can Manage Run, and Can Manage.
    • databricks.ClusterPolicy can control which kinds of clusters users can create for jobs.

    Create Job Resource

    Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.

    Constructor syntax

    new Job(name: string, args?: JobArgs, opts?: CustomResourceOptions);
    @overload
    def Job(resource_name: str,
            args: Optional[JobArgs] = None,
            opts: Optional[ResourceOptions] = None)
    
    @overload
    def Job(resource_name: str,
            opts: Optional[ResourceOptions] = None,
            always_running: Optional[bool] = None,
            budget_policy_id: Optional[str] = None,
            continuous: Optional[JobContinuousArgs] = None,
            control_run_state: Optional[bool] = None,
            dbt_task: Optional[JobDbtTaskArgs] = None,
            deployment: Optional[JobDeploymentArgs] = None,
            description: Optional[str] = None,
            edit_mode: Optional[str] = None,
            email_notifications: Optional[JobEmailNotificationsArgs] = None,
            environments: Optional[Sequence[JobEnvironmentArgs]] = None,
            existing_cluster_id: Optional[str] = None,
            format: Optional[str] = None,
            git_source: Optional[JobGitSourceArgs] = None,
            health: Optional[JobHealthArgs] = None,
            job_clusters: Optional[Sequence[JobJobClusterArgs]] = None,
            libraries: Optional[Sequence[JobLibraryArgs]] = None,
            max_concurrent_runs: Optional[int] = None,
            max_retries: Optional[int] = None,
            min_retry_interval_millis: Optional[int] = None,
            name: Optional[str] = None,
            new_cluster: Optional[JobNewClusterArgs] = None,
            notebook_task: Optional[JobNotebookTaskArgs] = None,
            notification_settings: Optional[JobNotificationSettingsArgs] = None,
            parameters: Optional[Sequence[JobParameterArgs]] = None,
            pipeline_task: Optional[JobPipelineTaskArgs] = None,
            python_wheel_task: Optional[JobPythonWheelTaskArgs] = None,
            queue: Optional[JobQueueArgs] = None,
            retry_on_timeout: Optional[bool] = None,
            run_as: Optional[JobRunAsArgs] = None,
            run_job_task: Optional[JobRunJobTaskArgs] = None,
            schedule: Optional[JobScheduleArgs] = None,
            spark_jar_task: Optional[JobSparkJarTaskArgs] = None,
            spark_python_task: Optional[JobSparkPythonTaskArgs] = None,
            spark_submit_task: Optional[JobSparkSubmitTaskArgs] = None,
            tags: Optional[Mapping[str, str]] = None,
            tasks: Optional[Sequence[JobTaskArgs]] = None,
            timeout_seconds: Optional[int] = None,
            trigger: Optional[JobTriggerArgs] = None,
            webhook_notifications: Optional[JobWebhookNotificationsArgs] = None)
    func NewJob(ctx *Context, name string, args *JobArgs, opts ...ResourceOption) (*Job, error)
    public Job(string name, JobArgs? args = null, CustomResourceOptions? opts = null)
    public Job(String name, JobArgs args)
    public Job(String name, JobArgs args, CustomResourceOptions options)
    
    type: databricks:Job
    properties: # The arguments to resource properties.
    options: # Bag of options to control resource's behavior.
    
    

    Parameters

    name string
    The unique name of the resource.
    args JobArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    resource_name str
    The unique name of the resource.
    args JobArgs
    The arguments to resource properties.
    opts ResourceOptions
    Bag of options to control resource's behavior.
    ctx Context
    Context object for the current deployment.
    name string
    The unique name of the resource.
    args JobArgs
    The arguments to resource properties.
    opts ResourceOption
    Bag of options to control resource's behavior.
    name string
    The unique name of the resource.
    args JobArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    name String
    The unique name of the resource.
    args JobArgs
    The arguments to resource properties.
    options CustomResourceOptions
    Bag of options to control resource's behavior.

    Constructor example

    The following reference example uses placeholder values for all input properties.

    var jobResource = new Databricks.Job("jobResource", new()
    {
        BudgetPolicyId = "string",
        Continuous = new Databricks.Inputs.JobContinuousArgs
        {
            PauseStatus = "string",
        },
        ControlRunState = false,
        Deployment = new Databricks.Inputs.JobDeploymentArgs
        {
            Kind = "string",
            MetadataFilePath = "string",
        },
        Description = "string",
        EditMode = "string",
        EmailNotifications = new Databricks.Inputs.JobEmailNotificationsArgs
        {
            NoAlertForSkippedRuns = false,
            OnDurationWarningThresholdExceededs = new[]
            {
                "string",
            },
            OnFailures = new[]
            {
                "string",
            },
            OnStarts = new[]
            {
                "string",
            },
            OnStreamingBacklogExceededs = new[]
            {
                "string",
            },
            OnSuccesses = new[]
            {
                "string",
            },
        },
        Environments = new[]
        {
            new Databricks.Inputs.JobEnvironmentArgs
            {
                EnvironmentKey = "string",
                Spec = new Databricks.Inputs.JobEnvironmentSpecArgs
                {
                    Client = "string",
                    Dependencies = new[]
                    {
                        "string",
                    },
                },
            },
        },
        ExistingClusterId = "string",
        Format = "string",
        GitSource = new Databricks.Inputs.JobGitSourceArgs
        {
            Url = "string",
            Branch = "string",
            Commit = "string",
            GitSnapshot = new Databricks.Inputs.JobGitSourceGitSnapshotArgs
            {
                UsedCommit = "string",
            },
            JobSource = new Databricks.Inputs.JobGitSourceJobSourceArgs
            {
                ImportFromGitBranch = "string",
                JobConfigPath = "string",
                DirtyState = "string",
            },
            Provider = "string",
            Tag = "string",
        },
        Health = new Databricks.Inputs.JobHealthArgs
        {
            Rules = new[]
            {
                new Databricks.Inputs.JobHealthRuleArgs
                {
                    Metric = "string",
                    Op = "string",
                    Value = 0,
                },
            },
        },
        JobClusters = new[]
        {
            new Databricks.Inputs.JobJobClusterArgs
            {
                JobClusterKey = "string",
                NewCluster = new Databricks.Inputs.JobJobClusterNewClusterArgs
                {
                    SparkVersion = "string",
                    IdempotencyToken = "string",
                    SshPublicKeys = new[]
                    {
                        "string",
                    },
                    AzureAttributes = new Databricks.Inputs.JobJobClusterNewClusterAzureAttributesArgs
                    {
                        Availability = "string",
                        FirstOnDemand = 0,
                        LogAnalyticsInfo = new Databricks.Inputs.JobJobClusterNewClusterAzureAttributesLogAnalyticsInfoArgs
                        {
                            LogAnalyticsPrimaryKey = "string",
                            LogAnalyticsWorkspaceId = "string",
                        },
                        SpotBidMaxPrice = 0,
                    },
                    ClusterId = "string",
                    ClusterLogConf = new Databricks.Inputs.JobJobClusterNewClusterClusterLogConfArgs
                    {
                        Dbfs = new Databricks.Inputs.JobJobClusterNewClusterClusterLogConfDbfsArgs
                        {
                            Destination = "string",
                        },
                        S3 = new Databricks.Inputs.JobJobClusterNewClusterClusterLogConfS3Args
                        {
                            Destination = "string",
                            CannedAcl = "string",
                            EnableEncryption = false,
                            EncryptionType = "string",
                            Endpoint = "string",
                            KmsKey = "string",
                            Region = "string",
                        },
                    },
                    ClusterMountInfos = new[]
                    {
                        new Databricks.Inputs.JobJobClusterNewClusterClusterMountInfoArgs
                        {
                            LocalMountDirPath = "string",
                            NetworkFilesystemInfo = new Databricks.Inputs.JobJobClusterNewClusterClusterMountInfoNetworkFilesystemInfoArgs
                            {
                                ServerAddress = "string",
                                MountOptions = "string",
                            },
                            RemoteMountDirPath = "string",
                        },
                    },
                    InitScripts = new[]
                    {
                        new Databricks.Inputs.JobJobClusterNewClusterInitScriptArgs
                        {
                            Abfss = new Databricks.Inputs.JobJobClusterNewClusterInitScriptAbfssArgs
                            {
                                Destination = "string",
                            },
                            File = new Databricks.Inputs.JobJobClusterNewClusterInitScriptFileArgs
                            {
                                Destination = "string",
                            },
                            Gcs = new Databricks.Inputs.JobJobClusterNewClusterInitScriptGcsArgs
                            {
                                Destination = "string",
                            },
                            S3 = new Databricks.Inputs.JobJobClusterNewClusterInitScriptS3Args
                            {
                                Destination = "string",
                                CannedAcl = "string",
                                EnableEncryption = false,
                                EncryptionType = "string",
                                Endpoint = "string",
                                KmsKey = "string",
                                Region = "string",
                            },
                            Volumes = new Databricks.Inputs.JobJobClusterNewClusterInitScriptVolumesArgs
                            {
                                Destination = "string",
                            },
                            Workspace = new Databricks.Inputs.JobJobClusterNewClusterInitScriptWorkspaceArgs
                            {
                                Destination = "string",
                            },
                        },
                    },
                    CustomTags = 
                    {
                        { "string", "string" },
                    },
                    DataSecurityMode = "string",
                    DockerImage = new Databricks.Inputs.JobJobClusterNewClusterDockerImageArgs
                    {
                        Url = "string",
                        BasicAuth = new Databricks.Inputs.JobJobClusterNewClusterDockerImageBasicAuthArgs
                        {
                            Password = "string",
                            Username = "string",
                        },
                    },
                    DriverInstancePoolId = "string",
                    DriverNodeTypeId = "string",
                    EnableElasticDisk = false,
                    EnableLocalDiskEncryption = false,
                    WorkloadType = new Databricks.Inputs.JobJobClusterNewClusterWorkloadTypeArgs
                    {
                        Clients = new Databricks.Inputs.JobJobClusterNewClusterWorkloadTypeClientsArgs
                        {
                            Jobs = false,
                            Notebooks = false,
                        },
                    },
                    AwsAttributes = new Databricks.Inputs.JobJobClusterNewClusterAwsAttributesArgs
                    {
                        Availability = "string",
                        EbsVolumeCount = 0,
                        EbsVolumeIops = 0,
                        EbsVolumeSize = 0,
                        EbsVolumeThroughput = 0,
                        EbsVolumeType = "string",
                        FirstOnDemand = 0,
                        InstanceProfileArn = "string",
                        SpotBidPricePercent = 0,
                        ZoneId = "string",
                    },
                    ClusterName = "string",
                    InstancePoolId = "string",
                    IsSingleNode = false,
                    Kind = "string",
                    Libraries = new[]
                    {
                        new Databricks.Inputs.JobJobClusterNewClusterLibraryArgs
                        {
                            Cran = new Databricks.Inputs.JobJobClusterNewClusterLibraryCranArgs
                            {
                                Package = "string",
                                Repo = "string",
                            },
                            Egg = "string",
                            Jar = "string",
                            Maven = new Databricks.Inputs.JobJobClusterNewClusterLibraryMavenArgs
                            {
                                Coordinates = "string",
                                Exclusions = new[]
                                {
                                    "string",
                                },
                                Repo = "string",
                            },
                            Pypi = new Databricks.Inputs.JobJobClusterNewClusterLibraryPypiArgs
                            {
                                Package = "string",
                                Repo = "string",
                            },
                            Requirements = "string",
                            Whl = "string",
                        },
                    },
                    NodeTypeId = "string",
                    NumWorkers = 0,
                    PolicyId = "string",
                    RuntimeEngine = "string",
                    SingleUserName = "string",
                    SparkConf = 
                    {
                        { "string", "string" },
                    },
                    SparkEnvVars = 
                    {
                        { "string", "string" },
                    },
                    Autoscale = new Databricks.Inputs.JobJobClusterNewClusterAutoscaleArgs
                    {
                        MaxWorkers = 0,
                        MinWorkers = 0,
                    },
                    ApplyPolicyDefaultValues = false,
                    UseMlRuntime = false,
                    GcpAttributes = new Databricks.Inputs.JobJobClusterNewClusterGcpAttributesArgs
                    {
                        Availability = "string",
                        BootDiskSize = 0,
                        GoogleServiceAccount = "string",
                        LocalSsdCount = 0,
                        UsePreemptibleExecutors = false,
                        ZoneId = "string",
                    },
                },
            },
        },
        Libraries = new[]
        {
            new Databricks.Inputs.JobLibraryArgs
            {
                Cran = new Databricks.Inputs.JobLibraryCranArgs
                {
                    Package = "string",
                    Repo = "string",
                },
                Egg = "string",
                Jar = "string",
                Maven = new Databricks.Inputs.JobLibraryMavenArgs
                {
                    Coordinates = "string",
                    Exclusions = new[]
                    {
                        "string",
                    },
                    Repo = "string",
                },
                Pypi = new Databricks.Inputs.JobLibraryPypiArgs
                {
                    Package = "string",
                    Repo = "string",
                },
                Requirements = "string",
                Whl = "string",
            },
        },
        MaxConcurrentRuns = 0,
        Name = "string",
        NewCluster = new Databricks.Inputs.JobNewClusterArgs
        {
            SparkVersion = "string",
            IdempotencyToken = "string",
            SshPublicKeys = new[]
            {
                "string",
            },
            AzureAttributes = new Databricks.Inputs.JobNewClusterAzureAttributesArgs
            {
                Availability = "string",
                FirstOnDemand = 0,
                LogAnalyticsInfo = new Databricks.Inputs.JobNewClusterAzureAttributesLogAnalyticsInfoArgs
                {
                    LogAnalyticsPrimaryKey = "string",
                    LogAnalyticsWorkspaceId = "string",
                },
                SpotBidMaxPrice = 0,
            },
            ClusterId = "string",
            ClusterLogConf = new Databricks.Inputs.JobNewClusterClusterLogConfArgs
            {
                Dbfs = new Databricks.Inputs.JobNewClusterClusterLogConfDbfsArgs
                {
                    Destination = "string",
                },
                S3 = new Databricks.Inputs.JobNewClusterClusterLogConfS3Args
                {
                    Destination = "string",
                    CannedAcl = "string",
                    EnableEncryption = false,
                    EncryptionType = "string",
                    Endpoint = "string",
                    KmsKey = "string",
                    Region = "string",
                },
            },
            ClusterMountInfos = new[]
            {
                new Databricks.Inputs.JobNewClusterClusterMountInfoArgs
                {
                    LocalMountDirPath = "string",
                    NetworkFilesystemInfo = new Databricks.Inputs.JobNewClusterClusterMountInfoNetworkFilesystemInfoArgs
                    {
                        ServerAddress = "string",
                        MountOptions = "string",
                    },
                    RemoteMountDirPath = "string",
                },
            },
            InitScripts = new[]
            {
                new Databricks.Inputs.JobNewClusterInitScriptArgs
                {
                    Abfss = new Databricks.Inputs.JobNewClusterInitScriptAbfssArgs
                    {
                        Destination = "string",
                    },
                    File = new Databricks.Inputs.JobNewClusterInitScriptFileArgs
                    {
                        Destination = "string",
                    },
                    Gcs = new Databricks.Inputs.JobNewClusterInitScriptGcsArgs
                    {
                        Destination = "string",
                    },
                    S3 = new Databricks.Inputs.JobNewClusterInitScriptS3Args
                    {
                        Destination = "string",
                        CannedAcl = "string",
                        EnableEncryption = false,
                        EncryptionType = "string",
                        Endpoint = "string",
                        KmsKey = "string",
                        Region = "string",
                    },
                    Volumes = new Databricks.Inputs.JobNewClusterInitScriptVolumesArgs
                    {
                        Destination = "string",
                    },
                    Workspace = new Databricks.Inputs.JobNewClusterInitScriptWorkspaceArgs
                    {
                        Destination = "string",
                    },
                },
            },
            CustomTags = 
            {
                { "string", "string" },
            },
            DataSecurityMode = "string",
            DockerImage = new Databricks.Inputs.JobNewClusterDockerImageArgs
            {
                Url = "string",
                BasicAuth = new Databricks.Inputs.JobNewClusterDockerImageBasicAuthArgs
                {
                    Password = "string",
                    Username = "string",
                },
            },
            DriverInstancePoolId = "string",
            DriverNodeTypeId = "string",
            EnableElasticDisk = false,
            EnableLocalDiskEncryption = false,
            WorkloadType = new Databricks.Inputs.JobNewClusterWorkloadTypeArgs
            {
                Clients = new Databricks.Inputs.JobNewClusterWorkloadTypeClientsArgs
                {
                    Jobs = false,
                    Notebooks = false,
                },
            },
            AwsAttributes = new Databricks.Inputs.JobNewClusterAwsAttributesArgs
            {
                Availability = "string",
                EbsVolumeCount = 0,
                EbsVolumeIops = 0,
                EbsVolumeSize = 0,
                EbsVolumeThroughput = 0,
                EbsVolumeType = "string",
                FirstOnDemand = 0,
                InstanceProfileArn = "string",
                SpotBidPricePercent = 0,
                ZoneId = "string",
            },
            ClusterName = "string",
            InstancePoolId = "string",
            IsSingleNode = false,
            Kind = "string",
            Libraries = new[]
            {
                new Databricks.Inputs.JobNewClusterLibraryArgs
                {
                    Cran = new Databricks.Inputs.JobNewClusterLibraryCranArgs
                    {
                        Package = "string",
                        Repo = "string",
                    },
                    Egg = "string",
                    Jar = "string",
                    Maven = new Databricks.Inputs.JobNewClusterLibraryMavenArgs
                    {
                        Coordinates = "string",
                        Exclusions = new[]
                        {
                            "string",
                        },
                        Repo = "string",
                    },
                    Pypi = new Databricks.Inputs.JobNewClusterLibraryPypiArgs
                    {
                        Package = "string",
                        Repo = "string",
                    },
                    Requirements = "string",
                    Whl = "string",
                },
            },
            NodeTypeId = "string",
            NumWorkers = 0,
            PolicyId = "string",
            RuntimeEngine = "string",
            SingleUserName = "string",
            SparkConf = 
            {
                { "string", "string" },
            },
            SparkEnvVars = 
            {
                { "string", "string" },
            },
            Autoscale = new Databricks.Inputs.JobNewClusterAutoscaleArgs
            {
                MaxWorkers = 0,
                MinWorkers = 0,
            },
            ApplyPolicyDefaultValues = false,
            UseMlRuntime = false,
            GcpAttributes = new Databricks.Inputs.JobNewClusterGcpAttributesArgs
            {
                Availability = "string",
                BootDiskSize = 0,
                GoogleServiceAccount = "string",
                LocalSsdCount = 0,
                UsePreemptibleExecutors = false,
                ZoneId = "string",
            },
        },
        NotificationSettings = new Databricks.Inputs.JobNotificationSettingsArgs
        {
            NoAlertForCanceledRuns = false,
            NoAlertForSkippedRuns = false,
        },
        Parameters = new[]
        {
            new Databricks.Inputs.JobParameterArgs
            {
                Default = "string",
                Name = "string",
            },
        },
        Queue = new Databricks.Inputs.JobQueueArgs
        {
            Enabled = false,
        },
        RunAs = new Databricks.Inputs.JobRunAsArgs
        {
            ServicePrincipalName = "string",
            UserName = "string",
        },
        Schedule = new Databricks.Inputs.JobScheduleArgs
        {
            QuartzCronExpression = "string",
            TimezoneId = "string",
            PauseStatus = "string",
        },
        Tags = 
        {
            { "string", "string" },
        },
        Tasks = new[]
        {
            new Databricks.Inputs.JobTaskArgs
            {
                TaskKey = "string",
                MinRetryIntervalMillis = 0,
                DisableAutoOptimization = false,
                NewCluster = new Databricks.Inputs.JobTaskNewClusterArgs
                {
                    SparkVersion = "string",
                    IdempotencyToken = "string",
                    SshPublicKeys = new[]
                    {
                        "string",
                    },
                    AzureAttributes = new Databricks.Inputs.JobTaskNewClusterAzureAttributesArgs
                    {
                        Availability = "string",
                        FirstOnDemand = 0,
                        LogAnalyticsInfo = new Databricks.Inputs.JobTaskNewClusterAzureAttributesLogAnalyticsInfoArgs
                        {
                            LogAnalyticsPrimaryKey = "string",
                            LogAnalyticsWorkspaceId = "string",
                        },
                        SpotBidMaxPrice = 0,
                    },
                    ClusterId = "string",
                    ClusterLogConf = new Databricks.Inputs.JobTaskNewClusterClusterLogConfArgs
                    {
                        Dbfs = new Databricks.Inputs.JobTaskNewClusterClusterLogConfDbfsArgs
                        {
                            Destination = "string",
                        },
                        S3 = new Databricks.Inputs.JobTaskNewClusterClusterLogConfS3Args
                        {
                            Destination = "string",
                            CannedAcl = "string",
                            EnableEncryption = false,
                            EncryptionType = "string",
                            Endpoint = "string",
                            KmsKey = "string",
                            Region = "string",
                        },
                    },
                    ClusterMountInfos = new[]
                    {
                        new Databricks.Inputs.JobTaskNewClusterClusterMountInfoArgs
                        {
                            LocalMountDirPath = "string",
                            NetworkFilesystemInfo = new Databricks.Inputs.JobTaskNewClusterClusterMountInfoNetworkFilesystemInfoArgs
                            {
                                ServerAddress = "string",
                                MountOptions = "string",
                            },
                            RemoteMountDirPath = "string",
                        },
                    },
                    InitScripts = new[]
                    {
                        new Databricks.Inputs.JobTaskNewClusterInitScriptArgs
                        {
                            Abfss = new Databricks.Inputs.JobTaskNewClusterInitScriptAbfssArgs
                            {
                                Destination = "string",
                            },
                            File = new Databricks.Inputs.JobTaskNewClusterInitScriptFileArgs
                            {
                                Destination = "string",
                            },
                            Gcs = new Databricks.Inputs.JobTaskNewClusterInitScriptGcsArgs
                            {
                                Destination = "string",
                            },
                            S3 = new Databricks.Inputs.JobTaskNewClusterInitScriptS3Args
                            {
                                Destination = "string",
                                CannedAcl = "string",
                                EnableEncryption = false,
                                EncryptionType = "string",
                                Endpoint = "string",
                                KmsKey = "string",
                                Region = "string",
                            },
                            Volumes = new Databricks.Inputs.JobTaskNewClusterInitScriptVolumesArgs
                            {
                                Destination = "string",
                            },
                            Workspace = new Databricks.Inputs.JobTaskNewClusterInitScriptWorkspaceArgs
                            {
                                Destination = "string",
                            },
                        },
                    },
                    CustomTags = 
                    {
                        { "string", "string" },
                    },
                    DataSecurityMode = "string",
                    DockerImage = new Databricks.Inputs.JobTaskNewClusterDockerImageArgs
                    {
                        Url = "string",
                        BasicAuth = new Databricks.Inputs.JobTaskNewClusterDockerImageBasicAuthArgs
                        {
                            Password = "string",
                            Username = "string",
                        },
                    },
                    DriverInstancePoolId = "string",
                    DriverNodeTypeId = "string",
                    EnableElasticDisk = false,
                    EnableLocalDiskEncryption = false,
                    WorkloadType = new Databricks.Inputs.JobTaskNewClusterWorkloadTypeArgs
                    {
                        Clients = new Databricks.Inputs.JobTaskNewClusterWorkloadTypeClientsArgs
                        {
                            Jobs = false,
                            Notebooks = false,
                        },
                    },
                    AwsAttributes = new Databricks.Inputs.JobTaskNewClusterAwsAttributesArgs
                    {
                        Availability = "string",
                        EbsVolumeCount = 0,
                        EbsVolumeIops = 0,
                        EbsVolumeSize = 0,
                        EbsVolumeThroughput = 0,
                        EbsVolumeType = "string",
                        FirstOnDemand = 0,
                        InstanceProfileArn = "string",
                        SpotBidPricePercent = 0,
                        ZoneId = "string",
                    },
                    ClusterName = "string",
                    InstancePoolId = "string",
                    IsSingleNode = false,
                    Kind = "string",
                    Libraries = new[]
                    {
                        new Databricks.Inputs.JobTaskNewClusterLibraryArgs
                        {
                            Cran = new Databricks.Inputs.JobTaskNewClusterLibraryCranArgs
                            {
                                Package = "string",
                                Repo = "string",
                            },
                            Egg = "string",
                            Jar = "string",
                            Maven = new Databricks.Inputs.JobTaskNewClusterLibraryMavenArgs
                            {
                                Coordinates = "string",
                                Exclusions = new[]
                                {
                                    "string",
                                },
                                Repo = "string",
                            },
                            Pypi = new Databricks.Inputs.JobTaskNewClusterLibraryPypiArgs
                            {
                                Package = "string",
                                Repo = "string",
                            },
                            Requirements = "string",
                            Whl = "string",
                        },
                    },
                    NodeTypeId = "string",
                    NumWorkers = 0,
                    PolicyId = "string",
                    RuntimeEngine = "string",
                    SingleUserName = "string",
                    SparkConf = 
                    {
                        { "string", "string" },
                    },
                    SparkEnvVars = 
                    {
                        { "string", "string" },
                    },
                    Autoscale = new Databricks.Inputs.JobTaskNewClusterAutoscaleArgs
                    {
                        MaxWorkers = 0,
                        MinWorkers = 0,
                    },
                    ApplyPolicyDefaultValues = false,
                    UseMlRuntime = false,
                    GcpAttributes = new Databricks.Inputs.JobTaskNewClusterGcpAttributesArgs
                    {
                        Availability = "string",
                        BootDiskSize = 0,
                        GoogleServiceAccount = "string",
                        LocalSsdCount = 0,
                        UsePreemptibleExecutors = false,
                        ZoneId = "string",
                    },
                },
                Description = "string",
                NotebookTask = new Databricks.Inputs.JobTaskNotebookTaskArgs
                {
                    NotebookPath = "string",
                    BaseParameters = 
                    {
                        { "string", "string" },
                    },
                    Source = "string",
                    WarehouseId = "string",
                },
                EmailNotifications = new Databricks.Inputs.JobTaskEmailNotificationsArgs
                {
                    NoAlertForSkippedRuns = false,
                    OnDurationWarningThresholdExceededs = new[]
                    {
                        "string",
                    },
                    OnFailures = new[]
                    {
                        "string",
                    },
                    OnStarts = new[]
                    {
                        "string",
                    },
                    OnStreamingBacklogExceededs = new[]
                    {
                        "string",
                    },
                    OnSuccesses = new[]
                    {
                        "string",
                    },
                },
                EnvironmentKey = "string",
                ExistingClusterId = "string",
                ForEachTask = new Databricks.Inputs.JobTaskForEachTaskArgs
                {
                    Inputs = "string",
                    Task = new Databricks.Inputs.JobTaskForEachTaskTaskArgs
                    {
                        TaskKey = "string",
                        NewCluster = new Databricks.Inputs.JobTaskForEachTaskTaskNewClusterArgs
                        {
                            SparkVersion = "string",
                            IdempotencyToken = "string",
                            SshPublicKeys = new[]
                            {
                                "string",
                            },
                            AzureAttributes = new Databricks.Inputs.JobTaskForEachTaskTaskNewClusterAzureAttributesArgs
                            {
                                Availability = "string",
                                FirstOnDemand = 0,
                                LogAnalyticsInfo = new Databricks.Inputs.JobTaskForEachTaskTaskNewClusterAzureAttributesLogAnalyticsInfoArgs
                                {
                                    LogAnalyticsPrimaryKey = "string",
                                    LogAnalyticsWorkspaceId = "string",
                                },
                                SpotBidMaxPrice = 0,
                            },
                            ClusterId = "string",
                            ClusterLogConf = new Databricks.Inputs.JobTaskForEachTaskTaskNewClusterClusterLogConfArgs
                            {
                                Dbfs = new Databricks.Inputs.JobTaskForEachTaskTaskNewClusterClusterLogConfDbfsArgs
                                {
                                    Destination = "string",
                                },
                                S3 = new Databricks.Inputs.JobTaskForEachTaskTaskNewClusterClusterLogConfS3Args
                                {
                                    Destination = "string",
                                    CannedAcl = "string",
                                    EnableEncryption = false,
                                    EncryptionType = "string",
                                    Endpoint = "string",
                                    KmsKey = "string",
                                    Region = "string",
                                },
                            },
                            ClusterMountInfos = new[]
                            {
                                new Databricks.Inputs.JobTaskForEachTaskTaskNewClusterClusterMountInfoArgs
                                {
                                    LocalMountDirPath = "string",
                                    NetworkFilesystemInfo = new Databricks.Inputs.JobTaskForEachTaskTaskNewClusterClusterMountInfoNetworkFilesystemInfoArgs
                                    {
                                        ServerAddress = "string",
                                        MountOptions = "string",
                                    },
                                    RemoteMountDirPath = "string",
                                },
                            },
                            InitScripts = new[]
                            {
                                new Databricks.Inputs.JobTaskForEachTaskTaskNewClusterInitScriptArgs
                                {
                                    Abfss = new Databricks.Inputs.JobTaskForEachTaskTaskNewClusterInitScriptAbfssArgs
                                    {
                                        Destination = "string",
                                    },
                                    File = new Databricks.Inputs.JobTaskForEachTaskTaskNewClusterInitScriptFileArgs
                                    {
                                        Destination = "string",
                                    },
                                    Gcs = new Databricks.Inputs.JobTaskForEachTaskTaskNewClusterInitScriptGcsArgs
                                    {
                                        Destination = "string",
                                    },
                                    S3 = new Databricks.Inputs.JobTaskForEachTaskTaskNewClusterInitScriptS3Args
                                    {
                                        Destination = "string",
                                        CannedAcl = "string",
                                        EnableEncryption = false,
                                        EncryptionType = "string",
                                        Endpoint = "string",
                                        KmsKey = "string",
                                        Region = "string",
                                    },
                                    Volumes = new Databricks.Inputs.JobTaskForEachTaskTaskNewClusterInitScriptVolumesArgs
                                    {
                                        Destination = "string",
                                    },
                                    Workspace = new Databricks.Inputs.JobTaskForEachTaskTaskNewClusterInitScriptWorkspaceArgs
                                    {
                                        Destination = "string",
                                    },
                                },
                            },
                            CustomTags = 
                            {
                                { "string", "string" },
                            },
                            DataSecurityMode = "string",
                            DockerImage = new Databricks.Inputs.JobTaskForEachTaskTaskNewClusterDockerImageArgs
                            {
                                Url = "string",
                                BasicAuth = new Databricks.Inputs.JobTaskForEachTaskTaskNewClusterDockerImageBasicAuthArgs
                                {
                                    Password = "string",
                                    Username = "string",
                                },
                            },
                            DriverInstancePoolId = "string",
                            DriverNodeTypeId = "string",
                            EnableElasticDisk = false,
                            EnableLocalDiskEncryption = false,
                            WorkloadType = new Databricks.Inputs.JobTaskForEachTaskTaskNewClusterWorkloadTypeArgs
                            {
                                Clients = new Databricks.Inputs.JobTaskForEachTaskTaskNewClusterWorkloadTypeClientsArgs
                                {
                                    Jobs = false,
                                    Notebooks = false,
                                },
                            },
                            AwsAttributes = new Databricks.Inputs.JobTaskForEachTaskTaskNewClusterAwsAttributesArgs
                            {
                                Availability = "string",
                                EbsVolumeCount = 0,
                                EbsVolumeIops = 0,
                                EbsVolumeSize = 0,
                                EbsVolumeThroughput = 0,
                                EbsVolumeType = "string",
                                FirstOnDemand = 0,
                                InstanceProfileArn = "string",
                                SpotBidPricePercent = 0,
                                ZoneId = "string",
                            },
                            ClusterName = "string",
                            InstancePoolId = "string",
                            IsSingleNode = false,
                            Kind = "string",
                            Libraries = new[]
                            {
                                new Databricks.Inputs.JobTaskForEachTaskTaskNewClusterLibraryArgs
                                {
                                    Cran = new Databricks.Inputs.JobTaskForEachTaskTaskNewClusterLibraryCranArgs
                                    {
                                        Package = "string",
                                        Repo = "string",
                                    },
                                    Egg = "string",
                                    Jar = "string",
                                    Maven = new Databricks.Inputs.JobTaskForEachTaskTaskNewClusterLibraryMavenArgs
                                    {
                                        Coordinates = "string",
                                        Exclusions = new[]
                                        {
                                            "string",
                                        },
                                        Repo = "string",
                                    },
                                    Pypi = new Databricks.Inputs.JobTaskForEachTaskTaskNewClusterLibraryPypiArgs
                                    {
                                        Package = "string",
                                        Repo = "string",
                                    },
                                    Requirements = "string",
                                    Whl = "string",
                                },
                            },
                            NodeTypeId = "string",
                            NumWorkers = 0,
                            PolicyId = "string",
                            RuntimeEngine = "string",
                            SingleUserName = "string",
                            SparkConf = 
                            {
                                { "string", "string" },
                            },
                            SparkEnvVars = 
                            {
                                { "string", "string" },
                            },
                            Autoscale = new Databricks.Inputs.JobTaskForEachTaskTaskNewClusterAutoscaleArgs
                            {
                                MaxWorkers = 0,
                                MinWorkers = 0,
                            },
                            ApplyPolicyDefaultValues = false,
                            UseMlRuntime = false,
                            GcpAttributes = new Databricks.Inputs.JobTaskForEachTaskTaskNewClusterGcpAttributesArgs
                            {
                                Availability = "string",
                                BootDiskSize = 0,
                                GoogleServiceAccount = "string",
                                LocalSsdCount = 0,
                                UsePreemptibleExecutors = false,
                                ZoneId = "string",
                            },
                        },
                        ConditionTask = new Databricks.Inputs.JobTaskForEachTaskTaskConditionTaskArgs
                        {
                            Left = "string",
                            Op = "string",
                            Right = "string",
                        },
                        DependsOns = new[]
                        {
                            new Databricks.Inputs.JobTaskForEachTaskTaskDependsOnArgs
                            {
                                TaskKey = "string",
                                Outcome = "string",
                            },
                        },
                        Description = "string",
                        DisableAutoOptimization = false,
                        EmailNotifications = new Databricks.Inputs.JobTaskForEachTaskTaskEmailNotificationsArgs
                        {
                            NoAlertForSkippedRuns = false,
                            OnDurationWarningThresholdExceededs = new[]
                            {
                                "string",
                            },
                            OnFailures = new[]
                            {
                                "string",
                            },
                            OnStarts = new[]
                            {
                                "string",
                            },
                            OnStreamingBacklogExceededs = new[]
                            {
                                "string",
                            },
                            OnSuccesses = new[]
                            {
                                "string",
                            },
                        },
                        EnvironmentKey = "string",
                        ExistingClusterId = "string",
                        Health = new Databricks.Inputs.JobTaskForEachTaskTaskHealthArgs
                        {
                            Rules = new[]
                            {
                                new Databricks.Inputs.JobTaskForEachTaskTaskHealthRuleArgs
                                {
                                    Metric = "string",
                                    Op = "string",
                                    Value = 0,
                                },
                            },
                        },
                        JobClusterKey = "string",
                        Libraries = new[]
                        {
                            new Databricks.Inputs.JobTaskForEachTaskTaskLibraryArgs
                            {
                                Cran = new Databricks.Inputs.JobTaskForEachTaskTaskLibraryCranArgs
                                {
                                    Package = "string",
                                    Repo = "string",
                                },
                                Egg = "string",
                                Jar = "string",
                                Maven = new Databricks.Inputs.JobTaskForEachTaskTaskLibraryMavenArgs
                                {
                                    Coordinates = "string",
                                    Exclusions = new[]
                                    {
                                        "string",
                                    },
                                    Repo = "string",
                                },
                                Pypi = new Databricks.Inputs.JobTaskForEachTaskTaskLibraryPypiArgs
                                {
                                    Package = "string",
                                    Repo = "string",
                                },
                                Requirements = "string",
                                Whl = "string",
                            },
                        },
                        MaxRetries = 0,
                        WebhookNotifications = new Databricks.Inputs.JobTaskForEachTaskTaskWebhookNotificationsArgs
                        {
                            OnDurationWarningThresholdExceededs = new[]
                            {
                                new Databricks.Inputs.JobTaskForEachTaskTaskWebhookNotificationsOnDurationWarningThresholdExceededArgs
                                {
                                    Id = "string",
                                },
                            },
                            OnFailures = new[]
                            {
                                new Databricks.Inputs.JobTaskForEachTaskTaskWebhookNotificationsOnFailureArgs
                                {
                                    Id = "string",
                                },
                            },
                            OnStarts = new[]
                            {
                                new Databricks.Inputs.JobTaskForEachTaskTaskWebhookNotificationsOnStartArgs
                                {
                                    Id = "string",
                                },
                            },
                            OnStreamingBacklogExceededs = new[]
                            {
                                new Databricks.Inputs.JobTaskForEachTaskTaskWebhookNotificationsOnStreamingBacklogExceededArgs
                                {
                                    Id = "string",
                                },
                            },
                            OnSuccesses = new[]
                            {
                                new Databricks.Inputs.JobTaskForEachTaskTaskWebhookNotificationsOnSuccessArgs
                                {
                                    Id = "string",
                                },
                            },
                        },
                        DbtTask = new Databricks.Inputs.JobTaskForEachTaskTaskDbtTaskArgs
                        {
                            Commands = new[]
                            {
                                "string",
                            },
                            Catalog = "string",
                            ProfilesDirectory = "string",
                            ProjectDirectory = "string",
                            Schema = "string",
                            Source = "string",
                            WarehouseId = "string",
                        },
                        RetryOnTimeout = false,
                        NotificationSettings = new Databricks.Inputs.JobTaskForEachTaskTaskNotificationSettingsArgs
                        {
                            AlertOnLastAttempt = false,
                            NoAlertForCanceledRuns = false,
                            NoAlertForSkippedRuns = false,
                        },
                        PipelineTask = new Databricks.Inputs.JobTaskForEachTaskTaskPipelineTaskArgs
                        {
                            PipelineId = "string",
                            FullRefresh = false,
                        },
                        PythonWheelTask = new Databricks.Inputs.JobTaskForEachTaskTaskPythonWheelTaskArgs
                        {
                            EntryPoint = "string",
                            NamedParameters = 
                            {
                                { "string", "string" },
                            },
                            PackageName = "string",
                            Parameters = new[]
                            {
                                "string",
                            },
                        },
                        NotebookTask = new Databricks.Inputs.JobTaskForEachTaskTaskNotebookTaskArgs
                        {
                            NotebookPath = "string",
                            BaseParameters = 
                            {
                                { "string", "string" },
                            },
                            Source = "string",
                            WarehouseId = "string",
                        },
                        RunIf = "string",
                        RunJobTask = new Databricks.Inputs.JobTaskForEachTaskTaskRunJobTaskArgs
                        {
                            JobId = 0,
                            DbtCommands = new[]
                            {
                                "string",
                            },
                            JarParams = new[]
                            {
                                "string",
                            },
                            JobParameters = 
                            {
                                { "string", "string" },
                            },
                            NotebookParams = 
                            {
                                { "string", "string" },
                            },
                            PipelineParams = new Databricks.Inputs.JobTaskForEachTaskTaskRunJobTaskPipelineParamsArgs
                            {
                                FullRefresh = false,
                            },
                            PythonNamedParams = 
                            {
                                { "string", "string" },
                            },
                            PythonParams = new[]
                            {
                                "string",
                            },
                            SparkSubmitParams = new[]
                            {
                                "string",
                            },
                            SqlParams = 
                            {
                                { "string", "string" },
                            },
                        },
                        SparkJarTask = new Databricks.Inputs.JobTaskForEachTaskTaskSparkJarTaskArgs
                        {
                            JarUri = "string",
                            MainClassName = "string",
                            Parameters = new[]
                            {
                                "string",
                            },
                            RunAsRepl = false,
                        },
                        SparkPythonTask = new Databricks.Inputs.JobTaskForEachTaskTaskSparkPythonTaskArgs
                        {
                            PythonFile = "string",
                            Parameters = new[]
                            {
                                "string",
                            },
                            Source = "string",
                        },
                        SparkSubmitTask = new Databricks.Inputs.JobTaskForEachTaskTaskSparkSubmitTaskArgs
                        {
                            Parameters = new[]
                            {
                                "string",
                            },
                        },
                        SqlTask = new Databricks.Inputs.JobTaskForEachTaskTaskSqlTaskArgs
                        {
                            WarehouseId = "string",
                            Alert = new Databricks.Inputs.JobTaskForEachTaskTaskSqlTaskAlertArgs
                            {
                                AlertId = "string",
                                PauseSubscriptions = false,
                                Subscriptions = new[]
                                {
                                    new Databricks.Inputs.JobTaskForEachTaskTaskSqlTaskAlertSubscriptionArgs
                                    {
                                        DestinationId = "string",
                                        UserName = "string",
                                    },
                                },
                            },
                            Dashboard = new Databricks.Inputs.JobTaskForEachTaskTaskSqlTaskDashboardArgs
                            {
                                DashboardId = "string",
                                CustomSubject = "string",
                                PauseSubscriptions = false,
                                Subscriptions = new[]
                                {
                                    new Databricks.Inputs.JobTaskForEachTaskTaskSqlTaskDashboardSubscriptionArgs
                                    {
                                        DestinationId = "string",
                                        UserName = "string",
                                    },
                                },
                            },
                            File = new Databricks.Inputs.JobTaskForEachTaskTaskSqlTaskFileArgs
                            {
                                Path = "string",
                                Source = "string",
                            },
                            Parameters = 
                            {
                                { "string", "string" },
                            },
                            Query = new Databricks.Inputs.JobTaskForEachTaskTaskSqlTaskQueryArgs
                            {
                                QueryId = "string",
                            },
                        },
                        CleanRoomsNotebookTask = new Databricks.Inputs.JobTaskForEachTaskTaskCleanRoomsNotebookTaskArgs
                        {
                            CleanRoomName = "string",
                            NotebookName = "string",
                            Etag = "string",
                            NotebookBaseParameters = 
                            {
                                { "string", "string" },
                            },
                        },
                        TimeoutSeconds = 0,
                        MinRetryIntervalMillis = 0,
                    },
                    Concurrency = 0,
                },
                Health = new Databricks.Inputs.JobTaskHealthArgs
                {
                    Rules = new[]
                    {
                        new Databricks.Inputs.JobTaskHealthRuleArgs
                        {
                            Metric = "string",
                            Op = "string",
                            Value = 0,
                        },
                    },
                },
                JobClusterKey = "string",
                NotificationSettings = new Databricks.Inputs.JobTaskNotificationSettingsArgs
                {
                    AlertOnLastAttempt = false,
                    NoAlertForCanceledRuns = false,
                    NoAlertForSkippedRuns = false,
                },
                MaxRetries = 0,
                CleanRoomsNotebookTask = new Databricks.Inputs.JobTaskCleanRoomsNotebookTaskArgs
                {
                    CleanRoomName = "string",
                    NotebookName = "string",
                    Etag = "string",
                    NotebookBaseParameters = 
                    {
                        { "string", "string" },
                    },
                },
                DependsOns = new[]
                {
                    new Databricks.Inputs.JobTaskDependsOnArgs
                    {
                        TaskKey = "string",
                        Outcome = "string",
                    },
                },
                DbtTask = new Databricks.Inputs.JobTaskDbtTaskArgs
                {
                    Commands = new[]
                    {
                        "string",
                    },
                    Catalog = "string",
                    ProfilesDirectory = "string",
                    ProjectDirectory = "string",
                    Schema = "string",
                    Source = "string",
                    WarehouseId = "string",
                },
                Libraries = new[]
                {
                    new Databricks.Inputs.JobTaskLibraryArgs
                    {
                        Cran = new Databricks.Inputs.JobTaskLibraryCranArgs
                        {
                            Package = "string",
                            Repo = "string",
                        },
                        Egg = "string",
                        Jar = "string",
                        Maven = new Databricks.Inputs.JobTaskLibraryMavenArgs
                        {
                            Coordinates = "string",
                            Exclusions = new[]
                            {
                                "string",
                            },
                            Repo = "string",
                        },
                        Pypi = new Databricks.Inputs.JobTaskLibraryPypiArgs
                        {
                            Package = "string",
                            Repo = "string",
                        },
                        Requirements = "string",
                        Whl = "string",
                    },
                },
                PipelineTask = new Databricks.Inputs.JobTaskPipelineTaskArgs
                {
                    PipelineId = "string",
                    FullRefresh = false,
                },
                PythonWheelTask = new Databricks.Inputs.JobTaskPythonWheelTaskArgs
                {
                    EntryPoint = "string",
                    NamedParameters = 
                    {
                        { "string", "string" },
                    },
                    PackageName = "string",
                    Parameters = new[]
                    {
                        "string",
                    },
                },
                RetryOnTimeout = false,
                RunIf = "string",
                RunJobTask = new Databricks.Inputs.JobTaskRunJobTaskArgs
                {
                    JobId = 0,
                    DbtCommands = new[]
                    {
                        "string",
                    },
                    JarParams = new[]
                    {
                        "string",
                    },
                    JobParameters = 
                    {
                        { "string", "string" },
                    },
                    NotebookParams = 
                    {
                        { "string", "string" },
                    },
                    PipelineParams = new Databricks.Inputs.JobTaskRunJobTaskPipelineParamsArgs
                    {
                        FullRefresh = false,
                    },
                    PythonNamedParams = 
                    {
                        { "string", "string" },
                    },
                    PythonParams = new[]
                    {
                        "string",
                    },
                    SparkSubmitParams = new[]
                    {
                        "string",
                    },
                    SqlParams = 
                    {
                        { "string", "string" },
                    },
                },
                SparkJarTask = new Databricks.Inputs.JobTaskSparkJarTaskArgs
                {
                    JarUri = "string",
                    MainClassName = "string",
                    Parameters = new[]
                    {
                        "string",
                    },
                    RunAsRepl = false,
                },
                SparkPythonTask = new Databricks.Inputs.JobTaskSparkPythonTaskArgs
                {
                    PythonFile = "string",
                    Parameters = new[]
                    {
                        "string",
                    },
                    Source = "string",
                },
                SparkSubmitTask = new Databricks.Inputs.JobTaskSparkSubmitTaskArgs
                {
                    Parameters = new[]
                    {
                        "string",
                    },
                },
                SqlTask = new Databricks.Inputs.JobTaskSqlTaskArgs
                {
                    WarehouseId = "string",
                    Alert = new Databricks.Inputs.JobTaskSqlTaskAlertArgs
                    {
                        AlertId = "string",
                        PauseSubscriptions = false,
                        Subscriptions = new[]
                        {
                            new Databricks.Inputs.JobTaskSqlTaskAlertSubscriptionArgs
                            {
                                DestinationId = "string",
                                UserName = "string",
                            },
                        },
                    },
                    Dashboard = new Databricks.Inputs.JobTaskSqlTaskDashboardArgs
                    {
                        DashboardId = "string",
                        CustomSubject = "string",
                        PauseSubscriptions = false,
                        Subscriptions = new[]
                        {
                            new Databricks.Inputs.JobTaskSqlTaskDashboardSubscriptionArgs
                            {
                                DestinationId = "string",
                                UserName = "string",
                            },
                        },
                    },
                    File = new Databricks.Inputs.JobTaskSqlTaskFileArgs
                    {
                        Path = "string",
                        Source = "string",
                    },
                    Parameters = 
                    {
                        { "string", "string" },
                    },
                    Query = new Databricks.Inputs.JobTaskSqlTaskQueryArgs
                    {
                        QueryId = "string",
                    },
                },
                ConditionTask = new Databricks.Inputs.JobTaskConditionTaskArgs
                {
                    Left = "string",
                    Op = "string",
                    Right = "string",
                },
                TimeoutSeconds = 0,
                WebhookNotifications = new Databricks.Inputs.JobTaskWebhookNotificationsArgs
                {
                    OnDurationWarningThresholdExceededs = new[]
                    {
                        new Databricks.Inputs.JobTaskWebhookNotificationsOnDurationWarningThresholdExceededArgs
                        {
                            Id = "string",
                        },
                    },
                    OnFailures = new[]
                    {
                        new Databricks.Inputs.JobTaskWebhookNotificationsOnFailureArgs
                        {
                            Id = "string",
                        },
                    },
                    OnStarts = new[]
                    {
                        new Databricks.Inputs.JobTaskWebhookNotificationsOnStartArgs
                        {
                            Id = "string",
                        },
                    },
                    OnStreamingBacklogExceededs = new[]
                    {
                        new Databricks.Inputs.JobTaskWebhookNotificationsOnStreamingBacklogExceededArgs
                        {
                            Id = "string",
                        },
                    },
                    OnSuccesses = new[]
                    {
                        new Databricks.Inputs.JobTaskWebhookNotificationsOnSuccessArgs
                        {
                            Id = "string",
                        },
                    },
                },
            },
        },
        TimeoutSeconds = 0,
        Trigger = new Databricks.Inputs.JobTriggerArgs
        {
            FileArrival = new Databricks.Inputs.JobTriggerFileArrivalArgs
            {
                Url = "string",
                MinTimeBetweenTriggersSeconds = 0,
                WaitAfterLastChangeSeconds = 0,
            },
            PauseStatus = "string",
            Periodic = new Databricks.Inputs.JobTriggerPeriodicArgs
            {
                Interval = 0,
                Unit = "string",
            },
            Table = new Databricks.Inputs.JobTriggerTableArgs
            {
                Condition = "string",
                MinTimeBetweenTriggersSeconds = 0,
                TableNames = new[]
                {
                    "string",
                },
                WaitAfterLastChangeSeconds = 0,
            },
            TableUpdate = new Databricks.Inputs.JobTriggerTableUpdateArgs
            {
                TableNames = new[]
                {
                    "string",
                },
                Condition = "string",
                MinTimeBetweenTriggersSeconds = 0,
                WaitAfterLastChangeSeconds = 0,
            },
        },
        WebhookNotifications = new Databricks.Inputs.JobWebhookNotificationsArgs
        {
            OnDurationWarningThresholdExceededs = new[]
            {
                new Databricks.Inputs.JobWebhookNotificationsOnDurationWarningThresholdExceededArgs
                {
                    Id = "string",
                },
            },
            OnFailures = new[]
            {
                new Databricks.Inputs.JobWebhookNotificationsOnFailureArgs
                {
                    Id = "string",
                },
            },
            OnStarts = new[]
            {
                new Databricks.Inputs.JobWebhookNotificationsOnStartArgs
                {
                    Id = "string",
                },
            },
            OnStreamingBacklogExceededs = new[]
            {
                new Databricks.Inputs.JobWebhookNotificationsOnStreamingBacklogExceededArgs
                {
                    Id = "string",
                },
            },
            OnSuccesses = new[]
            {
                new Databricks.Inputs.JobWebhookNotificationsOnSuccessArgs
                {
                    Id = "string",
                },
            },
        },
    });
    
    example, err := databricks.NewJob(ctx, "jobResource", &databricks.JobArgs{
    	BudgetPolicyId: pulumi.String("string"),
    	Continuous: &databricks.JobContinuousArgs{
    		PauseStatus: pulumi.String("string"),
    	},
    	ControlRunState: pulumi.Bool(false),
    	Deployment: &databricks.JobDeploymentArgs{
    		Kind:             pulumi.String("string"),
    		MetadataFilePath: pulumi.String("string"),
    	},
    	Description: pulumi.String("string"),
    	EditMode:    pulumi.String("string"),
    	EmailNotifications: &databricks.JobEmailNotificationsArgs{
    		NoAlertForSkippedRuns: pulumi.Bool(false),
    		OnDurationWarningThresholdExceededs: pulumi.StringArray{
    			pulumi.String("string"),
    		},
    		OnFailures: pulumi.StringArray{
    			pulumi.String("string"),
    		},
    		OnStarts: pulumi.StringArray{
    			pulumi.String("string"),
    		},
    		OnStreamingBacklogExceededs: pulumi.StringArray{
    			pulumi.String("string"),
    		},
    		OnSuccesses: pulumi.StringArray{
    			pulumi.String("string"),
    		},
    	},
    	Environments: databricks.JobEnvironmentArray{
    		&databricks.JobEnvironmentArgs{
    			EnvironmentKey: pulumi.String("string"),
    			Spec: &databricks.JobEnvironmentSpecArgs{
    				Client: pulumi.String("string"),
    				Dependencies: pulumi.StringArray{
    					pulumi.String("string"),
    				},
    			},
    		},
    	},
    	ExistingClusterId: pulumi.String("string"),
    	Format:            pulumi.String("string"),
    	GitSource: &databricks.JobGitSourceArgs{
    		Url:    pulumi.String("string"),
    		Branch: pulumi.String("string"),
    		Commit: pulumi.String("string"),
    		GitSnapshot: &databricks.JobGitSourceGitSnapshotArgs{
    			UsedCommit: pulumi.String("string"),
    		},
    		JobSource: &databricks.JobGitSourceJobSourceArgs{
    			ImportFromGitBranch: pulumi.String("string"),
    			JobConfigPath:       pulumi.String("string"),
    			DirtyState:          pulumi.String("string"),
    		},
    		Provider: pulumi.String("string"),
    		Tag:      pulumi.String("string"),
    	},
    	Health: &databricks.JobHealthArgs{
    		Rules: databricks.JobHealthRuleArray{
    			&databricks.JobHealthRuleArgs{
    				Metric: pulumi.String("string"),
    				Op:     pulumi.String("string"),
    				Value:  pulumi.Int(0),
    			},
    		},
    	},
    	JobClusters: databricks.JobJobClusterArray{
    		&databricks.JobJobClusterArgs{
    			JobClusterKey: pulumi.String("string"),
    			NewCluster: &databricks.JobJobClusterNewClusterArgs{
    				SparkVersion:     pulumi.String("string"),
    				IdempotencyToken: pulumi.String("string"),
    				SshPublicKeys: pulumi.StringArray{
    					pulumi.String("string"),
    				},
    				AzureAttributes: &databricks.JobJobClusterNewClusterAzureAttributesArgs{
    					Availability:  pulumi.String("string"),
    					FirstOnDemand: pulumi.Int(0),
    					LogAnalyticsInfo: &databricks.JobJobClusterNewClusterAzureAttributesLogAnalyticsInfoArgs{
    						LogAnalyticsPrimaryKey:  pulumi.String("string"),
    						LogAnalyticsWorkspaceId: pulumi.String("string"),
    					},
    					SpotBidMaxPrice: pulumi.Float64(0),
    				},
    				ClusterId: pulumi.String("string"),
    				ClusterLogConf: &databricks.JobJobClusterNewClusterClusterLogConfArgs{
    					Dbfs: &databricks.JobJobClusterNewClusterClusterLogConfDbfsArgs{
    						Destination: pulumi.String("string"),
    					},
    					S3: &databricks.JobJobClusterNewClusterClusterLogConfS3Args{
    						Destination:      pulumi.String("string"),
    						CannedAcl:        pulumi.String("string"),
    						EnableEncryption: pulumi.Bool(false),
    						EncryptionType:   pulumi.String("string"),
    						Endpoint:         pulumi.String("string"),
    						KmsKey:           pulumi.String("string"),
    						Region:           pulumi.String("string"),
    					},
    				},
    				ClusterMountInfos: databricks.JobJobClusterNewClusterClusterMountInfoArray{
    					&databricks.JobJobClusterNewClusterClusterMountInfoArgs{
    						LocalMountDirPath: pulumi.String("string"),
    						NetworkFilesystemInfo: &databricks.JobJobClusterNewClusterClusterMountInfoNetworkFilesystemInfoArgs{
    							ServerAddress: pulumi.String("string"),
    							MountOptions:  pulumi.String("string"),
    						},
    						RemoteMountDirPath: pulumi.String("string"),
    					},
    				},
    				InitScripts: databricks.JobJobClusterNewClusterInitScriptArray{
    					&databricks.JobJobClusterNewClusterInitScriptArgs{
    						Abfss: &databricks.JobJobClusterNewClusterInitScriptAbfssArgs{
    							Destination: pulumi.String("string"),
    						},
    						File: &databricks.JobJobClusterNewClusterInitScriptFileArgs{
    							Destination: pulumi.String("string"),
    						},
    						Gcs: &databricks.JobJobClusterNewClusterInitScriptGcsArgs{
    							Destination: pulumi.String("string"),
    						},
    						S3: &databricks.JobJobClusterNewClusterInitScriptS3Args{
    							Destination:      pulumi.String("string"),
    							CannedAcl:        pulumi.String("string"),
    							EnableEncryption: pulumi.Bool(false),
    							EncryptionType:   pulumi.String("string"),
    							Endpoint:         pulumi.String("string"),
    							KmsKey:           pulumi.String("string"),
    							Region:           pulumi.String("string"),
    						},
    						Volumes: &databricks.JobJobClusterNewClusterInitScriptVolumesArgs{
    							Destination: pulumi.String("string"),
    						},
    						Workspace: &databricks.JobJobClusterNewClusterInitScriptWorkspaceArgs{
    							Destination: pulumi.String("string"),
    						},
    					},
    				},
    				CustomTags: pulumi.StringMap{
    					"string": pulumi.String("string"),
    				},
    				DataSecurityMode: pulumi.String("string"),
    				DockerImage: &databricks.JobJobClusterNewClusterDockerImageArgs{
    					Url: pulumi.String("string"),
    					BasicAuth: &databricks.JobJobClusterNewClusterDockerImageBasicAuthArgs{
    						Password: pulumi.String("string"),
    						Username: pulumi.String("string"),
    					},
    				},
    				DriverInstancePoolId:      pulumi.String("string"),
    				DriverNodeTypeId:          pulumi.String("string"),
    				EnableElasticDisk:         pulumi.Bool(false),
    				EnableLocalDiskEncryption: pulumi.Bool(false),
    				WorkloadType: &databricks.JobJobClusterNewClusterWorkloadTypeArgs{
    					Clients: &databricks.JobJobClusterNewClusterWorkloadTypeClientsArgs{
    						Jobs:      pulumi.Bool(false),
    						Notebooks: pulumi.Bool(false),
    					},
    				},
    				AwsAttributes: &databricks.JobJobClusterNewClusterAwsAttributesArgs{
    					Availability:        pulumi.String("string"),
    					EbsVolumeCount:      pulumi.Int(0),
    					EbsVolumeIops:       pulumi.Int(0),
    					EbsVolumeSize:       pulumi.Int(0),
    					EbsVolumeThroughput: pulumi.Int(0),
    					EbsVolumeType:       pulumi.String("string"),
    					FirstOnDemand:       pulumi.Int(0),
    					InstanceProfileArn:  pulumi.String("string"),
    					SpotBidPricePercent: pulumi.Int(0),
    					ZoneId:              pulumi.String("string"),
    				},
    				ClusterName:    pulumi.String("string"),
    				InstancePoolId: pulumi.String("string"),
    				IsSingleNode:   pulumi.Bool(false),
    				Kind:           pulumi.String("string"),
    				Libraries: databricks.JobJobClusterNewClusterLibraryArray{
    					&databricks.JobJobClusterNewClusterLibraryArgs{
    						Cran: &databricks.JobJobClusterNewClusterLibraryCranArgs{
    							Package: pulumi.String("string"),
    							Repo:    pulumi.String("string"),
    						},
    						Egg: pulumi.String("string"),
    						Jar: pulumi.String("string"),
    						Maven: &databricks.JobJobClusterNewClusterLibraryMavenArgs{
    							Coordinates: pulumi.String("string"),
    							Exclusions: pulumi.StringArray{
    								pulumi.String("string"),
    							},
    							Repo: pulumi.String("string"),
    						},
    						Pypi: &databricks.JobJobClusterNewClusterLibraryPypiArgs{
    							Package: pulumi.String("string"),
    							Repo:    pulumi.String("string"),
    						},
    						Requirements: pulumi.String("string"),
    						Whl:          pulumi.String("string"),
    					},
    				},
    				NodeTypeId:     pulumi.String("string"),
    				NumWorkers:     pulumi.Int(0),
    				PolicyId:       pulumi.String("string"),
    				RuntimeEngine:  pulumi.String("string"),
    				SingleUserName: pulumi.String("string"),
    				SparkConf: pulumi.StringMap{
    					"string": pulumi.String("string"),
    				},
    				SparkEnvVars: pulumi.StringMap{
    					"string": pulumi.String("string"),
    				},
    				Autoscale: &databricks.JobJobClusterNewClusterAutoscaleArgs{
    					MaxWorkers: pulumi.Int(0),
    					MinWorkers: pulumi.Int(0),
    				},
    				ApplyPolicyDefaultValues: pulumi.Bool(false),
    				UseMlRuntime:             pulumi.Bool(false),
    				GcpAttributes: &databricks.JobJobClusterNewClusterGcpAttributesArgs{
    					Availability:            pulumi.String("string"),
    					BootDiskSize:            pulumi.Int(0),
    					GoogleServiceAccount:    pulumi.String("string"),
    					LocalSsdCount:           pulumi.Int(0),
    					UsePreemptibleExecutors: pulumi.Bool(false),
    					ZoneId:                  pulumi.String("string"),
    				},
    			},
    		},
    	},
    	Libraries: databricks.JobLibraryArray{
    		&databricks.JobLibraryArgs{
    			Cran: &databricks.JobLibraryCranArgs{
    				Package: pulumi.String("string"),
    				Repo:    pulumi.String("string"),
    			},
    			Egg: pulumi.String("string"),
    			Jar: pulumi.String("string"),
    			Maven: &databricks.JobLibraryMavenArgs{
    				Coordinates: pulumi.String("string"),
    				Exclusions: pulumi.StringArray{
    					pulumi.String("string"),
    				},
    				Repo: pulumi.String("string"),
    			},
    			Pypi: &databricks.JobLibraryPypiArgs{
    				Package: pulumi.String("string"),
    				Repo:    pulumi.String("string"),
    			},
    			Requirements: pulumi.String("string"),
    			Whl:          pulumi.String("string"),
    		},
    	},
    	MaxConcurrentRuns: pulumi.Int(0),
    	Name:              pulumi.String("string"),
    	NewCluster: &databricks.JobNewClusterArgs{
    		SparkVersion:     pulumi.String("string"),
    		IdempotencyToken: pulumi.String("string"),
    		SshPublicKeys: pulumi.StringArray{
    			pulumi.String("string"),
    		},
    		AzureAttributes: &databricks.JobNewClusterAzureAttributesArgs{
    			Availability:  pulumi.String("string"),
    			FirstOnDemand: pulumi.Int(0),
    			LogAnalyticsInfo: &databricks.JobNewClusterAzureAttributesLogAnalyticsInfoArgs{
    				LogAnalyticsPrimaryKey:  pulumi.String("string"),
    				LogAnalyticsWorkspaceId: pulumi.String("string"),
    			},
    			SpotBidMaxPrice: pulumi.Float64(0),
    		},
    		ClusterId: pulumi.String("string"),
    		ClusterLogConf: &databricks.JobNewClusterClusterLogConfArgs{
    			Dbfs: &databricks.JobNewClusterClusterLogConfDbfsArgs{
    				Destination: pulumi.String("string"),
    			},
    			S3: &databricks.JobNewClusterClusterLogConfS3Args{
    				Destination:      pulumi.String("string"),
    				CannedAcl:        pulumi.String("string"),
    				EnableEncryption: pulumi.Bool(false),
    				EncryptionType:   pulumi.String("string"),
    				Endpoint:         pulumi.String("string"),
    				KmsKey:           pulumi.String("string"),
    				Region:           pulumi.String("string"),
    			},
    		},
    		ClusterMountInfos: databricks.JobNewClusterClusterMountInfoArray{
    			&databricks.JobNewClusterClusterMountInfoArgs{
    				LocalMountDirPath: pulumi.String("string"),
    				NetworkFilesystemInfo: &databricks.JobNewClusterClusterMountInfoNetworkFilesystemInfoArgs{
    					ServerAddress: pulumi.String("string"),
    					MountOptions:  pulumi.String("string"),
    				},
    				RemoteMountDirPath: pulumi.String("string"),
    			},
    		},
    		InitScripts: databricks.JobNewClusterInitScriptArray{
    			&databricks.JobNewClusterInitScriptArgs{
    				Abfss: &databricks.JobNewClusterInitScriptAbfssArgs{
    					Destination: pulumi.String("string"),
    				},
    				File: &databricks.JobNewClusterInitScriptFileArgs{
    					Destination: pulumi.String("string"),
    				},
    				Gcs: &databricks.JobNewClusterInitScriptGcsArgs{
    					Destination: pulumi.String("string"),
    				},
    				S3: &databricks.JobNewClusterInitScriptS3Args{
    					Destination:      pulumi.String("string"),
    					CannedAcl:        pulumi.String("string"),
    					EnableEncryption: pulumi.Bool(false),
    					EncryptionType:   pulumi.String("string"),
    					Endpoint:         pulumi.String("string"),
    					KmsKey:           pulumi.String("string"),
    					Region:           pulumi.String("string"),
    				},
    				Volumes: &databricks.JobNewClusterInitScriptVolumesArgs{
    					Destination: pulumi.String("string"),
    				},
    				Workspace: &databricks.JobNewClusterInitScriptWorkspaceArgs{
    					Destination: pulumi.String("string"),
    				},
    			},
    		},
    		CustomTags: pulumi.StringMap{
    			"string": pulumi.String("string"),
    		},
    		DataSecurityMode: pulumi.String("string"),
    		DockerImage: &databricks.JobNewClusterDockerImageArgs{
    			Url: pulumi.String("string"),
    			BasicAuth: &databricks.JobNewClusterDockerImageBasicAuthArgs{
    				Password: pulumi.String("string"),
    				Username: pulumi.String("string"),
    			},
    		},
    		DriverInstancePoolId:      pulumi.String("string"),
    		DriverNodeTypeId:          pulumi.String("string"),
    		EnableElasticDisk:         pulumi.Bool(false),
    		EnableLocalDiskEncryption: pulumi.Bool(false),
    		WorkloadType: &databricks.JobNewClusterWorkloadTypeArgs{
    			Clients: &databricks.JobNewClusterWorkloadTypeClientsArgs{
    				Jobs:      pulumi.Bool(false),
    				Notebooks: pulumi.Bool(false),
    			},
    		},
    		AwsAttributes: &databricks.JobNewClusterAwsAttributesArgs{
    			Availability:        pulumi.String("string"),
    			EbsVolumeCount:      pulumi.Int(0),
    			EbsVolumeIops:       pulumi.Int(0),
    			EbsVolumeSize:       pulumi.Int(0),
    			EbsVolumeThroughput: pulumi.Int(0),
    			EbsVolumeType:       pulumi.String("string"),
    			FirstOnDemand:       pulumi.Int(0),
    			InstanceProfileArn:  pulumi.String("string"),
    			SpotBidPricePercent: pulumi.Int(0),
    			ZoneId:              pulumi.String("string"),
    		},
    		ClusterName:    pulumi.String("string"),
    		InstancePoolId: pulumi.String("string"),
    		IsSingleNode:   pulumi.Bool(false),
    		Kind:           pulumi.String("string"),
    		Libraries: databricks.JobNewClusterLibraryArray{
    			&databricks.JobNewClusterLibraryArgs{
    				Cran: &databricks.JobNewClusterLibraryCranArgs{
    					Package: pulumi.String("string"),
    					Repo:    pulumi.String("string"),
    				},
    				Egg: pulumi.String("string"),
    				Jar: pulumi.String("string"),
    				Maven: &databricks.JobNewClusterLibraryMavenArgs{
    					Coordinates: pulumi.String("string"),
    					Exclusions: pulumi.StringArray{
    						pulumi.String("string"),
    					},
    					Repo: pulumi.String("string"),
    				},
    				Pypi: &databricks.JobNewClusterLibraryPypiArgs{
    					Package: pulumi.String("string"),
    					Repo:    pulumi.String("string"),
    				},
    				Requirements: pulumi.String("string"),
    				Whl:          pulumi.String("string"),
    			},
    		},
    		NodeTypeId:     pulumi.String("string"),
    		NumWorkers:     pulumi.Int(0),
    		PolicyId:       pulumi.String("string"),
    		RuntimeEngine:  pulumi.String("string"),
    		SingleUserName: pulumi.String("string"),
    		SparkConf: pulumi.StringMap{
    			"string": pulumi.String("string"),
    		},
    		SparkEnvVars: pulumi.StringMap{
    			"string": pulumi.String("string"),
    		},
    		Autoscale: &databricks.JobNewClusterAutoscaleArgs{
    			MaxWorkers: pulumi.Int(0),
    			MinWorkers: pulumi.Int(0),
    		},
    		ApplyPolicyDefaultValues: pulumi.Bool(false),
    		UseMlRuntime:             pulumi.Bool(false),
    		GcpAttributes: &databricks.JobNewClusterGcpAttributesArgs{
    			Availability:            pulumi.String("string"),
    			BootDiskSize:            pulumi.Int(0),
    			GoogleServiceAccount:    pulumi.String("string"),
    			LocalSsdCount:           pulumi.Int(0),
    			UsePreemptibleExecutors: pulumi.Bool(false),
    			ZoneId:                  pulumi.String("string"),
    		},
    	},
    	NotificationSettings: &databricks.JobNotificationSettingsArgs{
    		NoAlertForCanceledRuns: pulumi.Bool(false),
    		NoAlertForSkippedRuns:  pulumi.Bool(false),
    	},
    	Parameters: databricks.JobParameterArray{
    		&databricks.JobParameterArgs{
    			Default: pulumi.String("string"),
    			Name:    pulumi.String("string"),
    		},
    	},
    	Queue: &databricks.JobQueueArgs{
    		Enabled: pulumi.Bool(false),
    	},
    	RunAs: &databricks.JobRunAsArgs{
    		ServicePrincipalName: pulumi.String("string"),
    		UserName:             pulumi.String("string"),
    	},
    	Schedule: &databricks.JobScheduleArgs{
    		QuartzCronExpression: pulumi.String("string"),
    		TimezoneId:           pulumi.String("string"),
    		PauseStatus:          pulumi.String("string"),
    	},
    	Tags: pulumi.StringMap{
    		"string": pulumi.String("string"),
    	},
    	Tasks: databricks.JobTaskArray{
    		&databricks.JobTaskArgs{
    			TaskKey:                 pulumi.String("string"),
    			MinRetryIntervalMillis:  pulumi.Int(0),
    			DisableAutoOptimization: pulumi.Bool(false),
    			NewCluster: &databricks.JobTaskNewClusterArgs{
    				SparkVersion:     pulumi.String("string"),
    				IdempotencyToken: pulumi.String("string"),
    				SshPublicKeys: pulumi.StringArray{
    					pulumi.String("string"),
    				},
    				AzureAttributes: &databricks.JobTaskNewClusterAzureAttributesArgs{
    					Availability:  pulumi.String("string"),
    					FirstOnDemand: pulumi.Int(0),
    					LogAnalyticsInfo: &databricks.JobTaskNewClusterAzureAttributesLogAnalyticsInfoArgs{
    						LogAnalyticsPrimaryKey:  pulumi.String("string"),
    						LogAnalyticsWorkspaceId: pulumi.String("string"),
    					},
    					SpotBidMaxPrice: pulumi.Float64(0),
    				},
    				ClusterId: pulumi.String("string"),
    				ClusterLogConf: &databricks.JobTaskNewClusterClusterLogConfArgs{
    					Dbfs: &databricks.JobTaskNewClusterClusterLogConfDbfsArgs{
    						Destination: pulumi.String("string"),
    					},
    					S3: &databricks.JobTaskNewClusterClusterLogConfS3Args{
    						Destination:      pulumi.String("string"),
    						CannedAcl:        pulumi.String("string"),
    						EnableEncryption: pulumi.Bool(false),
    						EncryptionType:   pulumi.String("string"),
    						Endpoint:         pulumi.String("string"),
    						KmsKey:           pulumi.String("string"),
    						Region:           pulumi.String("string"),
    					},
    				},
    				ClusterMountInfos: databricks.JobTaskNewClusterClusterMountInfoArray{
    					&databricks.JobTaskNewClusterClusterMountInfoArgs{
    						LocalMountDirPath: pulumi.String("string"),
    						NetworkFilesystemInfo: &databricks.JobTaskNewClusterClusterMountInfoNetworkFilesystemInfoArgs{
    							ServerAddress: pulumi.String("string"),
    							MountOptions:  pulumi.String("string"),
    						},
    						RemoteMountDirPath: pulumi.String("string"),
    					},
    				},
    				InitScripts: databricks.JobTaskNewClusterInitScriptArray{
    					&databricks.JobTaskNewClusterInitScriptArgs{
    						Abfss: &databricks.JobTaskNewClusterInitScriptAbfssArgs{
    							Destination: pulumi.String("string"),
    						},
    						File: &databricks.JobTaskNewClusterInitScriptFileArgs{
    							Destination: pulumi.String("string"),
    						},
    						Gcs: &databricks.JobTaskNewClusterInitScriptGcsArgs{
    							Destination: pulumi.String("string"),
    						},
    						S3: &databricks.JobTaskNewClusterInitScriptS3Args{
    							Destination:      pulumi.String("string"),
    							CannedAcl:        pulumi.String("string"),
    							EnableEncryption: pulumi.Bool(false),
    							EncryptionType:   pulumi.String("string"),
    							Endpoint:         pulumi.String("string"),
    							KmsKey:           pulumi.String("string"),
    							Region:           pulumi.String("string"),
    						},
    						Volumes: &databricks.JobTaskNewClusterInitScriptVolumesArgs{
    							Destination: pulumi.String("string"),
    						},
    						Workspace: &databricks.JobTaskNewClusterInitScriptWorkspaceArgs{
    							Destination: pulumi.String("string"),
    						},
    					},
    				},
    				CustomTags: pulumi.StringMap{
    					"string": pulumi.String("string"),
    				},
    				DataSecurityMode: pulumi.String("string"),
    				DockerImage: &databricks.JobTaskNewClusterDockerImageArgs{
    					Url: pulumi.String("string"),
    					BasicAuth: &databricks.JobTaskNewClusterDockerImageBasicAuthArgs{
    						Password: pulumi.String("string"),
    						Username: pulumi.String("string"),
    					},
    				},
    				DriverInstancePoolId:      pulumi.String("string"),
    				DriverNodeTypeId:          pulumi.String("string"),
    				EnableElasticDisk:         pulumi.Bool(false),
    				EnableLocalDiskEncryption: pulumi.Bool(false),
    				WorkloadType: &databricks.JobTaskNewClusterWorkloadTypeArgs{
    					Clients: &databricks.JobTaskNewClusterWorkloadTypeClientsArgs{
    						Jobs:      pulumi.Bool(false),
    						Notebooks: pulumi.Bool(false),
    					},
    				},
    				AwsAttributes: &databricks.JobTaskNewClusterAwsAttributesArgs{
    					Availability:        pulumi.String("string"),
    					EbsVolumeCount:      pulumi.Int(0),
    					EbsVolumeIops:       pulumi.Int(0),
    					EbsVolumeSize:       pulumi.Int(0),
    					EbsVolumeThroughput: pulumi.Int(0),
    					EbsVolumeType:       pulumi.String("string"),
    					FirstOnDemand:       pulumi.Int(0),
    					InstanceProfileArn:  pulumi.String("string"),
    					SpotBidPricePercent: pulumi.Int(0),
    					ZoneId:              pulumi.String("string"),
    				},
    				ClusterName:    pulumi.String("string"),
    				InstancePoolId: pulumi.String("string"),
    				IsSingleNode:   pulumi.Bool(false),
    				Kind:           pulumi.String("string"),
    				Libraries: databricks.JobTaskNewClusterLibraryArray{
    					&databricks.JobTaskNewClusterLibraryArgs{
    						Cran: &databricks.JobTaskNewClusterLibraryCranArgs{
    							Package: pulumi.String("string"),
    							Repo:    pulumi.String("string"),
    						},
    						Egg: pulumi.String("string"),
    						Jar: pulumi.String("string"),
    						Maven: &databricks.JobTaskNewClusterLibraryMavenArgs{
    							Coordinates: pulumi.String("string"),
    							Exclusions: pulumi.StringArray{
    								pulumi.String("string"),
    							},
    							Repo: pulumi.String("string"),
    						},
    						Pypi: &databricks.JobTaskNewClusterLibraryPypiArgs{
    							Package: pulumi.String("string"),
    							Repo:    pulumi.String("string"),
    						},
    						Requirements: pulumi.String("string"),
    						Whl:          pulumi.String("string"),
    					},
    				},
    				NodeTypeId:     pulumi.String("string"),
    				NumWorkers:     pulumi.Int(0),
    				PolicyId:       pulumi.String("string"),
    				RuntimeEngine:  pulumi.String("string"),
    				SingleUserName: pulumi.String("string"),
    				SparkConf: pulumi.StringMap{
    					"string": pulumi.String("string"),
    				},
    				SparkEnvVars: pulumi.StringMap{
    					"string": pulumi.String("string"),
    				},
    				Autoscale: &databricks.JobTaskNewClusterAutoscaleArgs{
    					MaxWorkers: pulumi.Int(0),
    					MinWorkers: pulumi.Int(0),
    				},
    				ApplyPolicyDefaultValues: pulumi.Bool(false),
    				UseMlRuntime:             pulumi.Bool(false),
    				GcpAttributes: &databricks.JobTaskNewClusterGcpAttributesArgs{
    					Availability:            pulumi.String("string"),
    					BootDiskSize:            pulumi.Int(0),
    					GoogleServiceAccount:    pulumi.String("string"),
    					LocalSsdCount:           pulumi.Int(0),
    					UsePreemptibleExecutors: pulumi.Bool(false),
    					ZoneId:                  pulumi.String("string"),
    				},
    			},
    			Description: pulumi.String("string"),
    			NotebookTask: &databricks.JobTaskNotebookTaskArgs{
    				NotebookPath: pulumi.String("string"),
    				BaseParameters: pulumi.StringMap{
    					"string": pulumi.String("string"),
    				},
    				Source:      pulumi.String("string"),
    				WarehouseId: pulumi.String("string"),
    			},
    			EmailNotifications: &databricks.JobTaskEmailNotificationsArgs{
    				NoAlertForSkippedRuns: pulumi.Bool(false),
    				OnDurationWarningThresholdExceededs: pulumi.StringArray{
    					pulumi.String("string"),
    				},
    				OnFailures: pulumi.StringArray{
    					pulumi.String("string"),
    				},
    				OnStarts: pulumi.StringArray{
    					pulumi.String("string"),
    				},
    				OnStreamingBacklogExceededs: pulumi.StringArray{
    					pulumi.String("string"),
    				},
    				OnSuccesses: pulumi.StringArray{
    					pulumi.String("string"),
    				},
    			},
    			EnvironmentKey:    pulumi.String("string"),
    			ExistingClusterId: pulumi.String("string"),
    			ForEachTask: &databricks.JobTaskForEachTaskArgs{
    				Inputs: pulumi.String("string"),
    				Task: &databricks.JobTaskForEachTaskTaskArgs{
    					TaskKey: pulumi.String("string"),
    					NewCluster: &databricks.JobTaskForEachTaskTaskNewClusterArgs{
    						SparkVersion:     pulumi.String("string"),
    						IdempotencyToken: pulumi.String("string"),
    						SshPublicKeys: pulumi.StringArray{
    							pulumi.String("string"),
    						},
    						AzureAttributes: &databricks.JobTaskForEachTaskTaskNewClusterAzureAttributesArgs{
    							Availability:  pulumi.String("string"),
    							FirstOnDemand: pulumi.Int(0),
    							LogAnalyticsInfo: &databricks.JobTaskForEachTaskTaskNewClusterAzureAttributesLogAnalyticsInfoArgs{
    								LogAnalyticsPrimaryKey:  pulumi.String("string"),
    								LogAnalyticsWorkspaceId: pulumi.String("string"),
    							},
    							SpotBidMaxPrice: pulumi.Float64(0),
    						},
    						ClusterId: pulumi.String("string"),
    						ClusterLogConf: &databricks.JobTaskForEachTaskTaskNewClusterClusterLogConfArgs{
    							Dbfs: &databricks.JobTaskForEachTaskTaskNewClusterClusterLogConfDbfsArgs{
    								Destination: pulumi.String("string"),
    							},
    							S3: &databricks.JobTaskForEachTaskTaskNewClusterClusterLogConfS3Args{
    								Destination:      pulumi.String("string"),
    								CannedAcl:        pulumi.String("string"),
    								EnableEncryption: pulumi.Bool(false),
    								EncryptionType:   pulumi.String("string"),
    								Endpoint:         pulumi.String("string"),
    								KmsKey:           pulumi.String("string"),
    								Region:           pulumi.String("string"),
    							},
    						},
    						ClusterMountInfos: databricks.JobTaskForEachTaskTaskNewClusterClusterMountInfoArray{
    							&databricks.JobTaskForEachTaskTaskNewClusterClusterMountInfoArgs{
    								LocalMountDirPath: pulumi.String("string"),
    								NetworkFilesystemInfo: &databricks.JobTaskForEachTaskTaskNewClusterClusterMountInfoNetworkFilesystemInfoArgs{
    									ServerAddress: pulumi.String("string"),
    									MountOptions:  pulumi.String("string"),
    								},
    								RemoteMountDirPath: pulumi.String("string"),
    							},
    						},
    						InitScripts: databricks.JobTaskForEachTaskTaskNewClusterInitScriptArray{
    							&databricks.JobTaskForEachTaskTaskNewClusterInitScriptArgs{
    								Abfss: &databricks.JobTaskForEachTaskTaskNewClusterInitScriptAbfssArgs{
    									Destination: pulumi.String("string"),
    								},
    								File: &databricks.JobTaskForEachTaskTaskNewClusterInitScriptFileArgs{
    									Destination: pulumi.String("string"),
    								},
    								Gcs: &databricks.JobTaskForEachTaskTaskNewClusterInitScriptGcsArgs{
    									Destination: pulumi.String("string"),
    								},
    								S3: &databricks.JobTaskForEachTaskTaskNewClusterInitScriptS3Args{
    									Destination:      pulumi.String("string"),
    									CannedAcl:        pulumi.String("string"),
    									EnableEncryption: pulumi.Bool(false),
    									EncryptionType:   pulumi.String("string"),
    									Endpoint:         pulumi.String("string"),
    									KmsKey:           pulumi.String("string"),
    									Region:           pulumi.String("string"),
    								},
    								Volumes: &databricks.JobTaskForEachTaskTaskNewClusterInitScriptVolumesArgs{
    									Destination: pulumi.String("string"),
    								},
    								Workspace: &databricks.JobTaskForEachTaskTaskNewClusterInitScriptWorkspaceArgs{
    									Destination: pulumi.String("string"),
    								},
    							},
    						},
    						CustomTags: pulumi.StringMap{
    							"string": pulumi.String("string"),
    						},
    						DataSecurityMode: pulumi.String("string"),
    						DockerImage: &databricks.JobTaskForEachTaskTaskNewClusterDockerImageArgs{
    							Url: pulumi.String("string"),
    							BasicAuth: &databricks.JobTaskForEachTaskTaskNewClusterDockerImageBasicAuthArgs{
    								Password: pulumi.String("string"),
    								Username: pulumi.String("string"),
    							},
    						},
    						DriverInstancePoolId:      pulumi.String("string"),
    						DriverNodeTypeId:          pulumi.String("string"),
    						EnableElasticDisk:         pulumi.Bool(false),
    						EnableLocalDiskEncryption: pulumi.Bool(false),
    						WorkloadType: &databricks.JobTaskForEachTaskTaskNewClusterWorkloadTypeArgs{
    							Clients: &databricks.JobTaskForEachTaskTaskNewClusterWorkloadTypeClientsArgs{
    								Jobs:      pulumi.Bool(false),
    								Notebooks: pulumi.Bool(false),
    							},
    						},
    						AwsAttributes: &databricks.JobTaskForEachTaskTaskNewClusterAwsAttributesArgs{
    							Availability:        pulumi.String("string"),
    							EbsVolumeCount:      pulumi.Int(0),
    							EbsVolumeIops:       pulumi.Int(0),
    							EbsVolumeSize:       pulumi.Int(0),
    							EbsVolumeThroughput: pulumi.Int(0),
    							EbsVolumeType:       pulumi.String("string"),
    							FirstOnDemand:       pulumi.Int(0),
    							InstanceProfileArn:  pulumi.String("string"),
    							SpotBidPricePercent: pulumi.Int(0),
    							ZoneId:              pulumi.String("string"),
    						},
    						ClusterName:    pulumi.String("string"),
    						InstancePoolId: pulumi.String("string"),
    						IsSingleNode:   pulumi.Bool(false),
    						Kind:           pulumi.String("string"),
    						Libraries: databricks.JobTaskForEachTaskTaskNewClusterLibraryArray{
    							&databricks.JobTaskForEachTaskTaskNewClusterLibraryArgs{
    								Cran: &databricks.JobTaskForEachTaskTaskNewClusterLibraryCranArgs{
    									Package: pulumi.String("string"),
    									Repo:    pulumi.String("string"),
    								},
    								Egg: pulumi.String("string"),
    								Jar: pulumi.String("string"),
    								Maven: &databricks.JobTaskForEachTaskTaskNewClusterLibraryMavenArgs{
    									Coordinates: pulumi.String("string"),
    									Exclusions: pulumi.StringArray{
    										pulumi.String("string"),
    									},
    									Repo: pulumi.String("string"),
    								},
    								Pypi: &databricks.JobTaskForEachTaskTaskNewClusterLibraryPypiArgs{
    									Package: pulumi.String("string"),
    									Repo:    pulumi.String("string"),
    								},
    								Requirements: pulumi.String("string"),
    								Whl:          pulumi.String("string"),
    							},
    						},
    						NodeTypeId:     pulumi.String("string"),
    						NumWorkers:     pulumi.Int(0),
    						PolicyId:       pulumi.String("string"),
    						RuntimeEngine:  pulumi.String("string"),
    						SingleUserName: pulumi.String("string"),
    						SparkConf: pulumi.StringMap{
    							"string": pulumi.String("string"),
    						},
    						SparkEnvVars: pulumi.StringMap{
    							"string": pulumi.String("string"),
    						},
    						Autoscale: &databricks.JobTaskForEachTaskTaskNewClusterAutoscaleArgs{
    							MaxWorkers: pulumi.Int(0),
    							MinWorkers: pulumi.Int(0),
    						},
    						ApplyPolicyDefaultValues: pulumi.Bool(false),
    						UseMlRuntime:             pulumi.Bool(false),
    						GcpAttributes: &databricks.JobTaskForEachTaskTaskNewClusterGcpAttributesArgs{
    							Availability:            pulumi.String("string"),
    							BootDiskSize:            pulumi.Int(0),
    							GoogleServiceAccount:    pulumi.String("string"),
    							LocalSsdCount:           pulumi.Int(0),
    							UsePreemptibleExecutors: pulumi.Bool(false),
    							ZoneId:                  pulumi.String("string"),
    						},
    					},
    					ConditionTask: &databricks.JobTaskForEachTaskTaskConditionTaskArgs{
    						Left:  pulumi.String("string"),
    						Op:    pulumi.String("string"),
    						Right: pulumi.String("string"),
    					},
    					DependsOns: databricks.JobTaskForEachTaskTaskDependsOnArray{
    						&databricks.JobTaskForEachTaskTaskDependsOnArgs{
    							TaskKey: pulumi.String("string"),
    							Outcome: pulumi.String("string"),
    						},
    					},
    					Description:             pulumi.String("string"),
    					DisableAutoOptimization: pulumi.Bool(false),
    					EmailNotifications: &databricks.JobTaskForEachTaskTaskEmailNotificationsArgs{
    						NoAlertForSkippedRuns: pulumi.Bool(false),
    						OnDurationWarningThresholdExceededs: pulumi.StringArray{
    							pulumi.String("string"),
    						},
    						OnFailures: pulumi.StringArray{
    							pulumi.String("string"),
    						},
    						OnStarts: pulumi.StringArray{
    							pulumi.String("string"),
    						},
    						OnStreamingBacklogExceededs: pulumi.StringArray{
    							pulumi.String("string"),
    						},
    						OnSuccesses: pulumi.StringArray{
    							pulumi.String("string"),
    						},
    					},
    					EnvironmentKey:    pulumi.String("string"),
    					ExistingClusterId: pulumi.String("string"),
    					Health: &databricks.JobTaskForEachTaskTaskHealthArgs{
    						Rules: databricks.JobTaskForEachTaskTaskHealthRuleArray{
    							&databricks.JobTaskForEachTaskTaskHealthRuleArgs{
    								Metric: pulumi.String("string"),
    								Op:     pulumi.String("string"),
    								Value:  pulumi.Int(0),
    							},
    						},
    					},
    					JobClusterKey: pulumi.String("string"),
    					Libraries: databricks.JobTaskForEachTaskTaskLibraryArray{
    						&databricks.JobTaskForEachTaskTaskLibraryArgs{
    							Cran: &databricks.JobTaskForEachTaskTaskLibraryCranArgs{
    								Package: pulumi.String("string"),
    								Repo:    pulumi.String("string"),
    							},
    							Egg: pulumi.String("string"),
    							Jar: pulumi.String("string"),
    							Maven: &databricks.JobTaskForEachTaskTaskLibraryMavenArgs{
    								Coordinates: pulumi.String("string"),
    								Exclusions: pulumi.StringArray{
    									pulumi.String("string"),
    								},
    								Repo: pulumi.String("string"),
    							},
    							Pypi: &databricks.JobTaskForEachTaskTaskLibraryPypiArgs{
    								Package: pulumi.String("string"),
    								Repo:    pulumi.String("string"),
    							},
    							Requirements: pulumi.String("string"),
    							Whl:          pulumi.String("string"),
    						},
    					},
    					MaxRetries: pulumi.Int(0),
    					WebhookNotifications: &databricks.JobTaskForEachTaskTaskWebhookNotificationsArgs{
    						OnDurationWarningThresholdExceededs: databricks.JobTaskForEachTaskTaskWebhookNotificationsOnDurationWarningThresholdExceededArray{
    							&databricks.JobTaskForEachTaskTaskWebhookNotificationsOnDurationWarningThresholdExceededArgs{
    								Id: pulumi.String("string"),
    							},
    						},
    						OnFailures: databricks.JobTaskForEachTaskTaskWebhookNotificationsOnFailureArray{
    							&databricks.JobTaskForEachTaskTaskWebhookNotificationsOnFailureArgs{
    								Id: pulumi.String("string"),
    							},
    						},
    						OnStarts: databricks.JobTaskForEachTaskTaskWebhookNotificationsOnStartArray{
    							&databricks.JobTaskForEachTaskTaskWebhookNotificationsOnStartArgs{
    								Id: pulumi.String("string"),
    							},
    						},
    						OnStreamingBacklogExceededs: databricks.JobTaskForEachTaskTaskWebhookNotificationsOnStreamingBacklogExceededArray{
    							&databricks.JobTaskForEachTaskTaskWebhookNotificationsOnStreamingBacklogExceededArgs{
    								Id: pulumi.String("string"),
    							},
    						},
    						OnSuccesses: databricks.JobTaskForEachTaskTaskWebhookNotificationsOnSuccessArray{
    							&databricks.JobTaskForEachTaskTaskWebhookNotificationsOnSuccessArgs{
    								Id: pulumi.String("string"),
    							},
    						},
    					},
    					DbtTask: &databricks.JobTaskForEachTaskTaskDbtTaskArgs{
    						Commands: pulumi.StringArray{
    							pulumi.String("string"),
    						},
    						Catalog:           pulumi.String("string"),
    						ProfilesDirectory: pulumi.String("string"),
    						ProjectDirectory:  pulumi.String("string"),
    						Schema:            pulumi.String("string"),
    						Source:            pulumi.String("string"),
    						WarehouseId:       pulumi.String("string"),
    					},
    					RetryOnTimeout: pulumi.Bool(false),
    					NotificationSettings: &databricks.JobTaskForEachTaskTaskNotificationSettingsArgs{
    						AlertOnLastAttempt:     pulumi.Bool(false),
    						NoAlertForCanceledRuns: pulumi.Bool(false),
    						NoAlertForSkippedRuns:  pulumi.Bool(false),
    					},
    					PipelineTask: &databricks.JobTaskForEachTaskTaskPipelineTaskArgs{
    						PipelineId:  pulumi.String("string"),
    						FullRefresh: pulumi.Bool(false),
    					},
    					PythonWheelTask: &databricks.JobTaskForEachTaskTaskPythonWheelTaskArgs{
    						EntryPoint: pulumi.String("string"),
    						NamedParameters: pulumi.StringMap{
    							"string": pulumi.String("string"),
    						},
    						PackageName: pulumi.String("string"),
    						Parameters: pulumi.StringArray{
    							pulumi.String("string"),
    						},
    					},
    					NotebookTask: &databricks.JobTaskForEachTaskTaskNotebookTaskArgs{
    						NotebookPath: pulumi.String("string"),
    						BaseParameters: pulumi.StringMap{
    							"string": pulumi.String("string"),
    						},
    						Source:      pulumi.String("string"),
    						WarehouseId: pulumi.String("string"),
    					},
    					RunIf: pulumi.String("string"),
    					RunJobTask: &databricks.JobTaskForEachTaskTaskRunJobTaskArgs{
    						JobId: pulumi.Int(0),
    						DbtCommands: pulumi.StringArray{
    							pulumi.String("string"),
    						},
    						JarParams: pulumi.StringArray{
    							pulumi.String("string"),
    						},
    						JobParameters: pulumi.StringMap{
    							"string": pulumi.String("string"),
    						},
    						NotebookParams: pulumi.StringMap{
    							"string": pulumi.String("string"),
    						},
    						PipelineParams: &databricks.JobTaskForEachTaskTaskRunJobTaskPipelineParamsArgs{
    							FullRefresh: pulumi.Bool(false),
    						},
    						PythonNamedParams: pulumi.StringMap{
    							"string": pulumi.String("string"),
    						},
    						PythonParams: pulumi.StringArray{
    							pulumi.String("string"),
    						},
    						SparkSubmitParams: pulumi.StringArray{
    							pulumi.String("string"),
    						},
    						SqlParams: pulumi.StringMap{
    							"string": pulumi.String("string"),
    						},
    					},
    					SparkJarTask: &databricks.JobTaskForEachTaskTaskSparkJarTaskArgs{
    						JarUri:        pulumi.String("string"),
    						MainClassName: pulumi.String("string"),
    						Parameters: pulumi.StringArray{
    							pulumi.String("string"),
    						},
    						RunAsRepl: pulumi.Bool(false),
    					},
    					SparkPythonTask: &databricks.JobTaskForEachTaskTaskSparkPythonTaskArgs{
    						PythonFile: pulumi.String("string"),
    						Parameters: pulumi.StringArray{
    							pulumi.String("string"),
    						},
    						Source: pulumi.String("string"),
    					},
    					SparkSubmitTask: &databricks.JobTaskForEachTaskTaskSparkSubmitTaskArgs{
    						Parameters: pulumi.StringArray{
    							pulumi.String("string"),
    						},
    					},
    					SqlTask: &databricks.JobTaskForEachTaskTaskSqlTaskArgs{
    						WarehouseId: pulumi.String("string"),
    						Alert: &databricks.JobTaskForEachTaskTaskSqlTaskAlertArgs{
    							AlertId:            pulumi.String("string"),
    							PauseSubscriptions: pulumi.Bool(false),
    							Subscriptions: databricks.JobTaskForEachTaskTaskSqlTaskAlertSubscriptionArray{
    								&databricks.JobTaskForEachTaskTaskSqlTaskAlertSubscriptionArgs{
    									DestinationId: pulumi.String("string"),
    									UserName:      pulumi.String("string"),
    								},
    							},
    						},
    						Dashboard: &databricks.JobTaskForEachTaskTaskSqlTaskDashboardArgs{
    							DashboardId:        pulumi.String("string"),
    							CustomSubject:      pulumi.String("string"),
    							PauseSubscriptions: pulumi.Bool(false),
    							Subscriptions: databricks.JobTaskForEachTaskTaskSqlTaskDashboardSubscriptionArray{
    								&databricks.JobTaskForEachTaskTaskSqlTaskDashboardSubscriptionArgs{
    									DestinationId: pulumi.String("string"),
    									UserName:      pulumi.String("string"),
    								},
    							},
    						},
    						File: &databricks.JobTaskForEachTaskTaskSqlTaskFileArgs{
    							Path:   pulumi.String("string"),
    							Source: pulumi.String("string"),
    						},
    						Parameters: pulumi.StringMap{
    							"string": pulumi.String("string"),
    						},
    						Query: &databricks.JobTaskForEachTaskTaskSqlTaskQueryArgs{
    							QueryId: pulumi.String("string"),
    						},
    					},
    					CleanRoomsNotebookTask: &databricks.JobTaskForEachTaskTaskCleanRoomsNotebookTaskArgs{
    						CleanRoomName: pulumi.String("string"),
    						NotebookName:  pulumi.String("string"),
    						Etag:          pulumi.String("string"),
    						NotebookBaseParameters: pulumi.StringMap{
    							"string": pulumi.String("string"),
    						},
    					},
    					TimeoutSeconds:         pulumi.Int(0),
    					MinRetryIntervalMillis: pulumi.Int(0),
    				},
    				Concurrency: pulumi.Int(0),
    			},
    			Health: &databricks.JobTaskHealthArgs{
    				Rules: databricks.JobTaskHealthRuleArray{
    					&databricks.JobTaskHealthRuleArgs{
    						Metric: pulumi.String("string"),
    						Op:     pulumi.String("string"),
    						Value:  pulumi.Int(0),
    					},
    				},
    			},
    			JobClusterKey: pulumi.String("string"),
    			NotificationSettings: &databricks.JobTaskNotificationSettingsArgs{
    				AlertOnLastAttempt:     pulumi.Bool(false),
    				NoAlertForCanceledRuns: pulumi.Bool(false),
    				NoAlertForSkippedRuns:  pulumi.Bool(false),
    			},
    			MaxRetries: pulumi.Int(0),
    			CleanRoomsNotebookTask: &databricks.JobTaskCleanRoomsNotebookTaskArgs{
    				CleanRoomName: pulumi.String("string"),
    				NotebookName:  pulumi.String("string"),
    				Etag:          pulumi.String("string"),
    				NotebookBaseParameters: pulumi.StringMap{
    					"string": pulumi.String("string"),
    				},
    			},
    			DependsOns: databricks.JobTaskDependsOnArray{
    				&databricks.JobTaskDependsOnArgs{
    					TaskKey: pulumi.String("string"),
    					Outcome: pulumi.String("string"),
    				},
    			},
    			DbtTask: &databricks.JobTaskDbtTaskArgs{
    				Commands: pulumi.StringArray{
    					pulumi.String("string"),
    				},
    				Catalog:           pulumi.String("string"),
    				ProfilesDirectory: pulumi.String("string"),
    				ProjectDirectory:  pulumi.String("string"),
    				Schema:            pulumi.String("string"),
    				Source:            pulumi.String("string"),
    				WarehouseId:       pulumi.String("string"),
    			},
    			Libraries: databricks.JobTaskLibraryArray{
    				&databricks.JobTaskLibraryArgs{
    					Cran: &databricks.JobTaskLibraryCranArgs{
    						Package: pulumi.String("string"),
    						Repo:    pulumi.String("string"),
    					},
    					Egg: pulumi.String("string"),
    					Jar: pulumi.String("string"),
    					Maven: &databricks.JobTaskLibraryMavenArgs{
    						Coordinates: pulumi.String("string"),
    						Exclusions: pulumi.StringArray{
    							pulumi.String("string"),
    						},
    						Repo: pulumi.String("string"),
    					},
    					Pypi: &databricks.JobTaskLibraryPypiArgs{
    						Package: pulumi.String("string"),
    						Repo:    pulumi.String("string"),
    					},
    					Requirements: pulumi.String("string"),
    					Whl:          pulumi.String("string"),
    				},
    			},
    			PipelineTask: &databricks.JobTaskPipelineTaskArgs{
    				PipelineId:  pulumi.String("string"),
    				FullRefresh: pulumi.Bool(false),
    			},
    			PythonWheelTask: &databricks.JobTaskPythonWheelTaskArgs{
    				EntryPoint: pulumi.String("string"),
    				NamedParameters: pulumi.StringMap{
    					"string": pulumi.String("string"),
    				},
    				PackageName: pulumi.String("string"),
    				Parameters: pulumi.StringArray{
    					pulumi.String("string"),
    				},
    			},
    			RetryOnTimeout: pulumi.Bool(false),
    			RunIf:          pulumi.String("string"),
    			RunJobTask: &databricks.JobTaskRunJobTaskArgs{
    				JobId: pulumi.Int(0),
    				DbtCommands: pulumi.StringArray{
    					pulumi.String("string"),
    				},
    				JarParams: pulumi.StringArray{
    					pulumi.String("string"),
    				},
    				JobParameters: pulumi.StringMap{
    					"string": pulumi.String("string"),
    				},
    				NotebookParams: pulumi.StringMap{
    					"string": pulumi.String("string"),
    				},
    				PipelineParams: &databricks.JobTaskRunJobTaskPipelineParamsArgs{
    					FullRefresh: pulumi.Bool(false),
    				},
    				PythonNamedParams: pulumi.StringMap{
    					"string": pulumi.String("string"),
    				},
    				PythonParams: pulumi.StringArray{
    					pulumi.String("string"),
    				},
    				SparkSubmitParams: pulumi.StringArray{
    					pulumi.String("string"),
    				},
    				SqlParams: pulumi.StringMap{
    					"string": pulumi.String("string"),
    				},
    			},
    			SparkJarTask: &databricks.JobTaskSparkJarTaskArgs{
    				JarUri:        pulumi.String("string"),
    				MainClassName: pulumi.String("string"),
    				Parameters: pulumi.StringArray{
    					pulumi.String("string"),
    				},
    				RunAsRepl: pulumi.Bool(false),
    			},
    			SparkPythonTask: &databricks.JobTaskSparkPythonTaskArgs{
    				PythonFile: pulumi.String("string"),
    				Parameters: pulumi.StringArray{
    					pulumi.String("string"),
    				},
    				Source: pulumi.String("string"),
    			},
    			SparkSubmitTask: &databricks.JobTaskSparkSubmitTaskArgs{
    				Parameters: pulumi.StringArray{
    					pulumi.String("string"),
    				},
    			},
    			SqlTask: &databricks.JobTaskSqlTaskArgs{
    				WarehouseId: pulumi.String("string"),
    				Alert: &databricks.JobTaskSqlTaskAlertArgs{
    					AlertId:            pulumi.String("string"),
    					PauseSubscriptions: pulumi.Bool(false),
    					Subscriptions: databricks.JobTaskSqlTaskAlertSubscriptionArray{
    						&databricks.JobTaskSqlTaskAlertSubscriptionArgs{
    							DestinationId: pulumi.String("string"),
    							UserName:      pulumi.String("string"),
    						},
    					},
    				},
    				Dashboard: &databricks.JobTaskSqlTaskDashboardArgs{
    					DashboardId:        pulumi.String("string"),
    					CustomSubject:      pulumi.String("string"),
    					PauseSubscriptions: pulumi.Bool(false),
    					Subscriptions: databricks.JobTaskSqlTaskDashboardSubscriptionArray{
    						&databricks.JobTaskSqlTaskDashboardSubscriptionArgs{
    							DestinationId: pulumi.String("string"),
    							UserName:      pulumi.String("string"),
    						},
    					},
    				},
    				File: &databricks.JobTaskSqlTaskFileArgs{
    					Path:   pulumi.String("string"),
    					Source: pulumi.String("string"),
    				},
    				Parameters: pulumi.StringMap{
    					"string": pulumi.String("string"),
    				},
    				Query: &databricks.JobTaskSqlTaskQueryArgs{
    					QueryId: pulumi.String("string"),
    				},
    			},
    			ConditionTask: &databricks.JobTaskConditionTaskArgs{
    				Left:  pulumi.String("string"),
    				Op:    pulumi.String("string"),
    				Right: pulumi.String("string"),
    			},
    			TimeoutSeconds: pulumi.Int(0),
    			WebhookNotifications: &databricks.JobTaskWebhookNotificationsArgs{
    				OnDurationWarningThresholdExceededs: databricks.JobTaskWebhookNotificationsOnDurationWarningThresholdExceededArray{
    					&databricks.JobTaskWebhookNotificationsOnDurationWarningThresholdExceededArgs{
    						Id: pulumi.String("string"),
    					},
    				},
    				OnFailures: databricks.JobTaskWebhookNotificationsOnFailureArray{
    					&databricks.JobTaskWebhookNotificationsOnFailureArgs{
    						Id: pulumi.String("string"),
    					},
    				},
    				OnStarts: databricks.JobTaskWebhookNotificationsOnStartArray{
    					&databricks.JobTaskWebhookNotificationsOnStartArgs{
    						Id: pulumi.String("string"),
    					},
    				},
    				OnStreamingBacklogExceededs: databricks.JobTaskWebhookNotificationsOnStreamingBacklogExceededArray{
    					&databricks.JobTaskWebhookNotificationsOnStreamingBacklogExceededArgs{
    						Id: pulumi.String("string"),
    					},
    				},
    				OnSuccesses: databricks.JobTaskWebhookNotificationsOnSuccessArray{
    					&databricks.JobTaskWebhookNotificationsOnSuccessArgs{
    						Id: pulumi.String("string"),
    					},
    				},
    			},
    		},
    	},
    	TimeoutSeconds: pulumi.Int(0),
    	Trigger: &databricks.JobTriggerArgs{
    		FileArrival: &databricks.JobTriggerFileArrivalArgs{
    			Url:                           pulumi.String("string"),
    			MinTimeBetweenTriggersSeconds: pulumi.Int(0),
    			WaitAfterLastChangeSeconds:    pulumi.Int(0),
    		},
    		PauseStatus: pulumi.String("string"),
    		Periodic: &databricks.JobTriggerPeriodicArgs{
    			Interval: pulumi.Int(0),
    			Unit:     pulumi.String("string"),
    		},
    		Table: &databricks.JobTriggerTableArgs{
    			Condition:                     pulumi.String("string"),
    			MinTimeBetweenTriggersSeconds: pulumi.Int(0),
    			TableNames: pulumi.StringArray{
    				pulumi.String("string"),
    			},
    			WaitAfterLastChangeSeconds: pulumi.Int(0),
    		},
    		TableUpdate: &databricks.JobTriggerTableUpdateArgs{
    			TableNames: pulumi.StringArray{
    				pulumi.String("string"),
    			},
    			Condition:                     pulumi.String("string"),
    			MinTimeBetweenTriggersSeconds: pulumi.Int(0),
    			WaitAfterLastChangeSeconds:    pulumi.Int(0),
    		},
    	},
    	WebhookNotifications: &databricks.JobWebhookNotificationsArgs{
    		OnDurationWarningThresholdExceededs: databricks.JobWebhookNotificationsOnDurationWarningThresholdExceededArray{
    			&databricks.JobWebhookNotificationsOnDurationWarningThresholdExceededArgs{
    				Id: pulumi.String("string"),
    			},
    		},
    		OnFailures: databricks.JobWebhookNotificationsOnFailureArray{
    			&databricks.JobWebhookNotificationsOnFailureArgs{
    				Id: pulumi.String("string"),
    			},
    		},
    		OnStarts: databricks.JobWebhookNotificationsOnStartArray{
    			&databricks.JobWebhookNotificationsOnStartArgs{
    				Id: pulumi.String("string"),
    			},
    		},
    		OnStreamingBacklogExceededs: databricks.JobWebhookNotificationsOnStreamingBacklogExceededArray{
    			&databricks.JobWebhookNotificationsOnStreamingBacklogExceededArgs{
    				Id: pulumi.String("string"),
    			},
    		},
    		OnSuccesses: databricks.JobWebhookNotificationsOnSuccessArray{
    			&databricks.JobWebhookNotificationsOnSuccessArgs{
    				Id: pulumi.String("string"),
    			},
    		},
    	},
    })
    
    var jobResource = new Job("jobResource", JobArgs.builder()
        .budgetPolicyId("string")
        .continuous(JobContinuousArgs.builder()
            .pauseStatus("string")
            .build())
        .controlRunState(false)
        .deployment(JobDeploymentArgs.builder()
            .kind("string")
            .metadataFilePath("string")
            .build())
        .description("string")
        .editMode("string")
        .emailNotifications(JobEmailNotificationsArgs.builder()
            .noAlertForSkippedRuns(false)
            .onDurationWarningThresholdExceededs("string")
            .onFailures("string")
            .onStarts("string")
            .onStreamingBacklogExceededs("string")
            .onSuccesses("string")
            .build())
        .environments(JobEnvironmentArgs.builder()
            .environmentKey("string")
            .spec(JobEnvironmentSpecArgs.builder()
                .client("string")
                .dependencies("string")
                .build())
            .build())
        .existingClusterId("string")
        .format("string")
        .gitSource(JobGitSourceArgs.builder()
            .url("string")
            .branch("string")
            .commit("string")
            .gitSnapshot(JobGitSourceGitSnapshotArgs.builder()
                .usedCommit("string")
                .build())
            .jobSource(JobGitSourceJobSourceArgs.builder()
                .importFromGitBranch("string")
                .jobConfigPath("string")
                .dirtyState("string")
                .build())
            .provider("string")
            .tag("string")
            .build())
        .health(JobHealthArgs.builder()
            .rules(JobHealthRuleArgs.builder()
                .metric("string")
                .op("string")
                .value(0)
                .build())
            .build())
        .jobClusters(JobJobClusterArgs.builder()
            .jobClusterKey("string")
            .newCluster(JobJobClusterNewClusterArgs.builder()
                .sparkVersion("string")
                .idempotencyToken("string")
                .sshPublicKeys("string")
                .azureAttributes(JobJobClusterNewClusterAzureAttributesArgs.builder()
                    .availability("string")
                    .firstOnDemand(0)
                    .logAnalyticsInfo(JobJobClusterNewClusterAzureAttributesLogAnalyticsInfoArgs.builder()
                        .logAnalyticsPrimaryKey("string")
                        .logAnalyticsWorkspaceId("string")
                        .build())
                    .spotBidMaxPrice(0)
                    .build())
                .clusterId("string")
                .clusterLogConf(JobJobClusterNewClusterClusterLogConfArgs.builder()
                    .dbfs(JobJobClusterNewClusterClusterLogConfDbfsArgs.builder()
                        .destination("string")
                        .build())
                    .s3(JobJobClusterNewClusterClusterLogConfS3Args.builder()
                        .destination("string")
                        .cannedAcl("string")
                        .enableEncryption(false)
                        .encryptionType("string")
                        .endpoint("string")
                        .kmsKey("string")
                        .region("string")
                        .build())
                    .build())
                .clusterMountInfos(JobJobClusterNewClusterClusterMountInfoArgs.builder()
                    .localMountDirPath("string")
                    .networkFilesystemInfo(JobJobClusterNewClusterClusterMountInfoNetworkFilesystemInfoArgs.builder()
                        .serverAddress("string")
                        .mountOptions("string")
                        .build())
                    .remoteMountDirPath("string")
                    .build())
                .initScripts(JobJobClusterNewClusterInitScriptArgs.builder()
                    .abfss(JobJobClusterNewClusterInitScriptAbfssArgs.builder()
                        .destination("string")
                        .build())
                    .file(JobJobClusterNewClusterInitScriptFileArgs.builder()
                        .destination("string")
                        .build())
                    .gcs(JobJobClusterNewClusterInitScriptGcsArgs.builder()
                        .destination("string")
                        .build())
                    .s3(JobJobClusterNewClusterInitScriptS3Args.builder()
                        .destination("string")
                        .cannedAcl("string")
                        .enableEncryption(false)
                        .encryptionType("string")
                        .endpoint("string")
                        .kmsKey("string")
                        .region("string")
                        .build())
                    .volumes(JobJobClusterNewClusterInitScriptVolumesArgs.builder()
                        .destination("string")
                        .build())
                    .workspace(JobJobClusterNewClusterInitScriptWorkspaceArgs.builder()
                        .destination("string")
                        .build())
                    .build())
                .customTags(Map.of("string", "string"))
                .dataSecurityMode("string")
                .dockerImage(JobJobClusterNewClusterDockerImageArgs.builder()
                    .url("string")
                    .basicAuth(JobJobClusterNewClusterDockerImageBasicAuthArgs.builder()
                        .password("string")
                        .username("string")
                        .build())
                    .build())
                .driverInstancePoolId("string")
                .driverNodeTypeId("string")
                .enableElasticDisk(false)
                .enableLocalDiskEncryption(false)
                .workloadType(JobJobClusterNewClusterWorkloadTypeArgs.builder()
                    .clients(JobJobClusterNewClusterWorkloadTypeClientsArgs.builder()
                        .jobs(false)
                        .notebooks(false)
                        .build())
                    .build())
                .awsAttributes(JobJobClusterNewClusterAwsAttributesArgs.builder()
                    .availability("string")
                    .ebsVolumeCount(0)
                    .ebsVolumeIops(0)
                    .ebsVolumeSize(0)
                    .ebsVolumeThroughput(0)
                    .ebsVolumeType("string")
                    .firstOnDemand(0)
                    .instanceProfileArn("string")
                    .spotBidPricePercent(0)
                    .zoneId("string")
                    .build())
                .clusterName("string")
                .instancePoolId("string")
                .isSingleNode(false)
                .kind("string")
                .libraries(JobJobClusterNewClusterLibraryArgs.builder()
                    .cran(JobJobClusterNewClusterLibraryCranArgs.builder()
                        .package_("string")
                        .repo("string")
                        .build())
                    .egg("string")
                    .jar("string")
                    .maven(JobJobClusterNewClusterLibraryMavenArgs.builder()
                        .coordinates("string")
                        .exclusions("string")
                        .repo("string")
                        .build())
                    .pypi(JobJobClusterNewClusterLibraryPypiArgs.builder()
                        .package_("string")
                        .repo("string")
                        .build())
                    .requirements("string")
                    .whl("string")
                    .build())
                .nodeTypeId("string")
                .numWorkers(0)
                .policyId("string")
                .runtimeEngine("string")
                .singleUserName("string")
                .sparkConf(Map.of("string", "string"))
                .sparkEnvVars(Map.of("string", "string"))
                .autoscale(JobJobClusterNewClusterAutoscaleArgs.builder()
                    .maxWorkers(0)
                    .minWorkers(0)
                    .build())
                .applyPolicyDefaultValues(false)
                .useMlRuntime(false)
                .gcpAttributes(JobJobClusterNewClusterGcpAttributesArgs.builder()
                    .availability("string")
                    .bootDiskSize(0)
                    .googleServiceAccount("string")
                    .localSsdCount(0)
                    .usePreemptibleExecutors(false)
                    .zoneId("string")
                    .build())
                .build())
            .build())
        .libraries(JobLibraryArgs.builder()
            .cran(JobLibraryCranArgs.builder()
                .package_("string")
                .repo("string")
                .build())
            .egg("string")
            .jar("string")
            .maven(JobLibraryMavenArgs.builder()
                .coordinates("string")
                .exclusions("string")
                .repo("string")
                .build())
            .pypi(JobLibraryPypiArgs.builder()
                .package_("string")
                .repo("string")
                .build())
            .requirements("string")
            .whl("string")
            .build())
        .maxConcurrentRuns(0)
        .name("string")
        .newCluster(JobNewClusterArgs.builder()
            .sparkVersion("string")
            .idempotencyToken("string")
            .sshPublicKeys("string")
            .azureAttributes(JobNewClusterAzureAttributesArgs.builder()
                .availability("string")
                .firstOnDemand(0)
                .logAnalyticsInfo(JobNewClusterAzureAttributesLogAnalyticsInfoArgs.builder()
                    .logAnalyticsPrimaryKey("string")
                    .logAnalyticsWorkspaceId("string")
                    .build())
                .spotBidMaxPrice(0)
                .build())
            .clusterId("string")
            .clusterLogConf(JobNewClusterClusterLogConfArgs.builder()
                .dbfs(JobNewClusterClusterLogConfDbfsArgs.builder()
                    .destination("string")
                    .build())
                .s3(JobNewClusterClusterLogConfS3Args.builder()
                    .destination("string")
                    .cannedAcl("string")
                    .enableEncryption(false)
                    .encryptionType("string")
                    .endpoint("string")
                    .kmsKey("string")
                    .region("string")
                    .build())
                .build())
            .clusterMountInfos(JobNewClusterClusterMountInfoArgs.builder()
                .localMountDirPath("string")
                .networkFilesystemInfo(JobNewClusterClusterMountInfoNetworkFilesystemInfoArgs.builder()
                    .serverAddress("string")
                    .mountOptions("string")
                    .build())
                .remoteMountDirPath("string")
                .build())
            .initScripts(JobNewClusterInitScriptArgs.builder()
                .abfss(JobNewClusterInitScriptAbfssArgs.builder()
                    .destination("string")
                    .build())
                .file(JobNewClusterInitScriptFileArgs.builder()
                    .destination("string")
                    .build())
                .gcs(JobNewClusterInitScriptGcsArgs.builder()
                    .destination("string")
                    .build())
                .s3(JobNewClusterInitScriptS3Args.builder()
                    .destination("string")
                    .cannedAcl("string")
                    .enableEncryption(false)
                    .encryptionType("string")
                    .endpoint("string")
                    .kmsKey("string")
                    .region("string")
                    .build())
                .volumes(JobNewClusterInitScriptVolumesArgs.builder()
                    .destination("string")
                    .build())
                .workspace(JobNewClusterInitScriptWorkspaceArgs.builder()
                    .destination("string")
                    .build())
                .build())
            .customTags(Map.of("string", "string"))
            .dataSecurityMode("string")
            .dockerImage(JobNewClusterDockerImageArgs.builder()
                .url("string")
                .basicAuth(JobNewClusterDockerImageBasicAuthArgs.builder()
                    .password("string")
                    .username("string")
                    .build())
                .build())
            .driverInstancePoolId("string")
            .driverNodeTypeId("string")
            .enableElasticDisk(false)
            .enableLocalDiskEncryption(false)
            .workloadType(JobNewClusterWorkloadTypeArgs.builder()
                .clients(JobNewClusterWorkloadTypeClientsArgs.builder()
                    .jobs(false)
                    .notebooks(false)
                    .build())
                .build())
            .awsAttributes(JobNewClusterAwsAttributesArgs.builder()
                .availability("string")
                .ebsVolumeCount(0)
                .ebsVolumeIops(0)
                .ebsVolumeSize(0)
                .ebsVolumeThroughput(0)
                .ebsVolumeType("string")
                .firstOnDemand(0)
                .instanceProfileArn("string")
                .spotBidPricePercent(0)
                .zoneId("string")
                .build())
            .clusterName("string")
            .instancePoolId("string")
            .isSingleNode(false)
            .kind("string")
            .libraries(JobNewClusterLibraryArgs.builder()
                .cran(JobNewClusterLibraryCranArgs.builder()
                    .package_("string")
                    .repo("string")
                    .build())
                .egg("string")
                .jar("string")
                .maven(JobNewClusterLibraryMavenArgs.builder()
                    .coordinates("string")
                    .exclusions("string")
                    .repo("string")
                    .build())
                .pypi(JobNewClusterLibraryPypiArgs.builder()
                    .package_("string")
                    .repo("string")
                    .build())
                .requirements("string")
                .whl("string")
                .build())
            .nodeTypeId("string")
            .numWorkers(0)
            .policyId("string")
            .runtimeEngine("string")
            .singleUserName("string")
            .sparkConf(Map.of("string", "string"))
            .sparkEnvVars(Map.of("string", "string"))
            .autoscale(JobNewClusterAutoscaleArgs.builder()
                .maxWorkers(0)
                .minWorkers(0)
                .build())
            .applyPolicyDefaultValues(false)
            .useMlRuntime(false)
            .gcpAttributes(JobNewClusterGcpAttributesArgs.builder()
                .availability("string")
                .bootDiskSize(0)
                .googleServiceAccount("string")
                .localSsdCount(0)
                .usePreemptibleExecutors(false)
                .zoneId("string")
                .build())
            .build())
        .notificationSettings(JobNotificationSettingsArgs.builder()
            .noAlertForCanceledRuns(false)
            .noAlertForSkippedRuns(false)
            .build())
        .parameters(JobParameterArgs.builder()
            .default_("string")
            .name("string")
            .build())
        .queue(JobQueueArgs.builder()
            .enabled(false)
            .build())
        .runAs(JobRunAsArgs.builder()
            .servicePrincipalName("string")
            .userName("string")
            .build())
        .schedule(JobScheduleArgs.builder()
            .quartzCronExpression("string")
            .timezoneId("string")
            .pauseStatus("string")
            .build())
        .tags(Map.of("string", "string"))
        .tasks(JobTaskArgs.builder()
            .taskKey("string")
            .minRetryIntervalMillis(0)
            .disableAutoOptimization(false)
            .newCluster(JobTaskNewClusterArgs.builder()
                .sparkVersion("string")
                .idempotencyToken("string")
                .sshPublicKeys("string")
                .azureAttributes(JobTaskNewClusterAzureAttributesArgs.builder()
                    .availability("string")
                    .firstOnDemand(0)
                    .logAnalyticsInfo(JobTaskNewClusterAzureAttributesLogAnalyticsInfoArgs.builder()
                        .logAnalyticsPrimaryKey("string")
                        .logAnalyticsWorkspaceId("string")
                        .build())
                    .spotBidMaxPrice(0)
                    .build())
                .clusterId("string")
                .clusterLogConf(JobTaskNewClusterClusterLogConfArgs.builder()
                    .dbfs(JobTaskNewClusterClusterLogConfDbfsArgs.builder()
                        .destination("string")
                        .build())
                    .s3(JobTaskNewClusterClusterLogConfS3Args.builder()
                        .destination("string")
                        .cannedAcl("string")
                        .enableEncryption(false)
                        .encryptionType("string")
                        .endpoint("string")
                        .kmsKey("string")
                        .region("string")
                        .build())
                    .build())
                .clusterMountInfos(JobTaskNewClusterClusterMountInfoArgs.builder()
                    .localMountDirPath("string")
                    .networkFilesystemInfo(JobTaskNewClusterClusterMountInfoNetworkFilesystemInfoArgs.builder()
                        .serverAddress("string")
                        .mountOptions("string")
                        .build())
                    .remoteMountDirPath("string")
                    .build())
                .initScripts(JobTaskNewClusterInitScriptArgs.builder()
                    .abfss(JobTaskNewClusterInitScriptAbfssArgs.builder()
                        .destination("string")
                        .build())
                    .file(JobTaskNewClusterInitScriptFileArgs.builder()
                        .destination("string")
                        .build())
                    .gcs(JobTaskNewClusterInitScriptGcsArgs.builder()
                        .destination("string")
                        .build())
                    .s3(JobTaskNewClusterInitScriptS3Args.builder()
                        .destination("string")
                        .cannedAcl("string")
                        .enableEncryption(false)
                        .encryptionType("string")
                        .endpoint("string")
                        .kmsKey("string")
                        .region("string")
                        .build())
                    .volumes(JobTaskNewClusterInitScriptVolumesArgs.builder()
                        .destination("string")
                        .build())
                    .workspace(JobTaskNewClusterInitScriptWorkspaceArgs.builder()
                        .destination("string")
                        .build())
                    .build())
                .customTags(Map.of("string", "string"))
                .dataSecurityMode("string")
                .dockerImage(JobTaskNewClusterDockerImageArgs.builder()
                    .url("string")
                    .basicAuth(JobTaskNewClusterDockerImageBasicAuthArgs.builder()
                        .password("string")
                        .username("string")
                        .build())
                    .build())
                .driverInstancePoolId("string")
                .driverNodeTypeId("string")
                .enableElasticDisk(false)
                .enableLocalDiskEncryption(false)
                .workloadType(JobTaskNewClusterWorkloadTypeArgs.builder()
                    .clients(JobTaskNewClusterWorkloadTypeClientsArgs.builder()
                        .jobs(false)
                        .notebooks(false)
                        .build())
                    .build())
                .awsAttributes(JobTaskNewClusterAwsAttributesArgs.builder()
                    .availability("string")
                    .ebsVolumeCount(0)
                    .ebsVolumeIops(0)
                    .ebsVolumeSize(0)
                    .ebsVolumeThroughput(0)
                    .ebsVolumeType("string")
                    .firstOnDemand(0)
                    .instanceProfileArn("string")
                    .spotBidPricePercent(0)
                    .zoneId("string")
                    .build())
                .clusterName("string")
                .instancePoolId("string")
                .isSingleNode(false)
                .kind("string")
                .libraries(JobTaskNewClusterLibraryArgs.builder()
                    .cran(JobTaskNewClusterLibraryCranArgs.builder()
                        .package_("string")
                        .repo("string")
                        .build())
                    .egg("string")
                    .jar("string")
                    .maven(JobTaskNewClusterLibraryMavenArgs.builder()
                        .coordinates("string")
                        .exclusions("string")
                        .repo("string")
                        .build())
                    .pypi(JobTaskNewClusterLibraryPypiArgs.builder()
                        .package_("string")
                        .repo("string")
                        .build())
                    .requirements("string")
                    .whl("string")
                    .build())
                .nodeTypeId("string")
                .numWorkers(0)
                .policyId("string")
                .runtimeEngine("string")
                .singleUserName("string")
                .sparkConf(Map.of("string", "string"))
                .sparkEnvVars(Map.of("string", "string"))
                .autoscale(JobTaskNewClusterAutoscaleArgs.builder()
                    .maxWorkers(0)
                    .minWorkers(0)
                    .build())
                .applyPolicyDefaultValues(false)
                .useMlRuntime(false)
                .gcpAttributes(JobTaskNewClusterGcpAttributesArgs.builder()
                    .availability("string")
                    .bootDiskSize(0)
                    .googleServiceAccount("string")
                    .localSsdCount(0)
                    .usePreemptibleExecutors(false)
                    .zoneId("string")
                    .build())
                .build())
            .description("string")
            .notebookTask(JobTaskNotebookTaskArgs.builder()
                .notebookPath("string")
                .baseParameters(Map.of("string", "string"))
                .source("string")
                .warehouseId("string")
                .build())
            .emailNotifications(JobTaskEmailNotificationsArgs.builder()
                .noAlertForSkippedRuns(false)
                .onDurationWarningThresholdExceededs("string")
                .onFailures("string")
                .onStarts("string")
                .onStreamingBacklogExceededs("string")
                .onSuccesses("string")
                .build())
            .environmentKey("string")
            .existingClusterId("string")
            .forEachTask(JobTaskForEachTaskArgs.builder()
                .inputs("string")
                .task(JobTaskForEachTaskTaskArgs.builder()
                    .taskKey("string")
                    .newCluster(JobTaskForEachTaskTaskNewClusterArgs.builder()
                        .sparkVersion("string")
                        .idempotencyToken("string")
                        .sshPublicKeys("string")
                        .azureAttributes(JobTaskForEachTaskTaskNewClusterAzureAttributesArgs.builder()
                            .availability("string")
                            .firstOnDemand(0)
                            .logAnalyticsInfo(JobTaskForEachTaskTaskNewClusterAzureAttributesLogAnalyticsInfoArgs.builder()
                                .logAnalyticsPrimaryKey("string")
                                .logAnalyticsWorkspaceId("string")
                                .build())
                            .spotBidMaxPrice(0)
                            .build())
                        .clusterId("string")
                        .clusterLogConf(JobTaskForEachTaskTaskNewClusterClusterLogConfArgs.builder()
                            .dbfs(JobTaskForEachTaskTaskNewClusterClusterLogConfDbfsArgs.builder()
                                .destination("string")
                                .build())
                            .s3(JobTaskForEachTaskTaskNewClusterClusterLogConfS3Args.builder()
                                .destination("string")
                                .cannedAcl("string")
                                .enableEncryption(false)
                                .encryptionType("string")
                                .endpoint("string")
                                .kmsKey("string")
                                .region("string")
                                .build())
                            .build())
                        .clusterMountInfos(JobTaskForEachTaskTaskNewClusterClusterMountInfoArgs.builder()
                            .localMountDirPath("string")
                            .networkFilesystemInfo(JobTaskForEachTaskTaskNewClusterClusterMountInfoNetworkFilesystemInfoArgs.builder()
                                .serverAddress("string")
                                .mountOptions("string")
                                .build())
                            .remoteMountDirPath("string")
                            .build())
                        .initScripts(JobTaskForEachTaskTaskNewClusterInitScriptArgs.builder()
                            .abfss(JobTaskForEachTaskTaskNewClusterInitScriptAbfssArgs.builder()
                                .destination("string")
                                .build())
                            .file(JobTaskForEachTaskTaskNewClusterInitScriptFileArgs.builder()
                                .destination("string")
                                .build())
                            .gcs(JobTaskForEachTaskTaskNewClusterInitScriptGcsArgs.builder()
                                .destination("string")
                                .build())
                            .s3(JobTaskForEachTaskTaskNewClusterInitScriptS3Args.builder()
                                .destination("string")
                                .cannedAcl("string")
                                .enableEncryption(false)
                                .encryptionType("string")
                                .endpoint("string")
                                .kmsKey("string")
                                .region("string")
                                .build())
                            .volumes(JobTaskForEachTaskTaskNewClusterInitScriptVolumesArgs.builder()
                                .destination("string")
                                .build())
                            .workspace(JobTaskForEachTaskTaskNewClusterInitScriptWorkspaceArgs.builder()
                                .destination("string")
                                .build())
                            .build())
                        .customTags(Map.of("string", "string"))
                        .dataSecurityMode("string")
                        .dockerImage(JobTaskForEachTaskTaskNewClusterDockerImageArgs.builder()
                            .url("string")
                            .basicAuth(JobTaskForEachTaskTaskNewClusterDockerImageBasicAuthArgs.builder()
                                .password("string")
                                .username("string")
                                .build())
                            .build())
                        .driverInstancePoolId("string")
                        .driverNodeTypeId("string")
                        .enableElasticDisk(false)
                        .enableLocalDiskEncryption(false)
                        .workloadType(JobTaskForEachTaskTaskNewClusterWorkloadTypeArgs.builder()
                            .clients(JobTaskForEachTaskTaskNewClusterWorkloadTypeClientsArgs.builder()
                                .jobs(false)
                                .notebooks(false)
                                .build())
                            .build())
                        .awsAttributes(JobTaskForEachTaskTaskNewClusterAwsAttributesArgs.builder()
                            .availability("string")
                            .ebsVolumeCount(0)
                            .ebsVolumeIops(0)
                            .ebsVolumeSize(0)
                            .ebsVolumeThroughput(0)
                            .ebsVolumeType("string")
                            .firstOnDemand(0)
                            .instanceProfileArn("string")
                            .spotBidPricePercent(0)
                            .zoneId("string")
                            .build())
                        .clusterName("string")
                        .instancePoolId("string")
                        .isSingleNode(false)
                        .kind("string")
                        .libraries(JobTaskForEachTaskTaskNewClusterLibraryArgs.builder()
                            .cran(JobTaskForEachTaskTaskNewClusterLibraryCranArgs.builder()
                                .package_("string")
                                .repo("string")
                                .build())
                            .egg("string")
                            .jar("string")
                            .maven(JobTaskForEachTaskTaskNewClusterLibraryMavenArgs.builder()
                                .coordinates("string")
                                .exclusions("string")
                                .repo("string")
                                .build())
                            .pypi(JobTaskForEachTaskTaskNewClusterLibraryPypiArgs.builder()
                                .package_("string")
                                .repo("string")
                                .build())
                            .requirements("string")
                            .whl("string")
                            .build())
                        .nodeTypeId("string")
                        .numWorkers(0)
                        .policyId("string")
                        .runtimeEngine("string")
                        .singleUserName("string")
                        .sparkConf(Map.of("string", "string"))
                        .sparkEnvVars(Map.of("string", "string"))
                        .autoscale(JobTaskForEachTaskTaskNewClusterAutoscaleArgs.builder()
                            .maxWorkers(0)
                            .minWorkers(0)
                            .build())
                        .applyPolicyDefaultValues(false)
                        .useMlRuntime(false)
                        .gcpAttributes(JobTaskForEachTaskTaskNewClusterGcpAttributesArgs.builder()
                            .availability("string")
                            .bootDiskSize(0)
                            .googleServiceAccount("string")
                            .localSsdCount(0)
                            .usePreemptibleExecutors(false)
                            .zoneId("string")
                            .build())
                        .build())
                    .conditionTask(JobTaskForEachTaskTaskConditionTaskArgs.builder()
                        .left("string")
                        .op("string")
                        .right("string")
                        .build())
                    .dependsOns(JobTaskForEachTaskTaskDependsOnArgs.builder()
                        .taskKey("string")
                        .outcome("string")
                        .build())
                    .description("string")
                    .disableAutoOptimization(false)
                    .emailNotifications(JobTaskForEachTaskTaskEmailNotificationsArgs.builder()
                        .noAlertForSkippedRuns(false)
                        .onDurationWarningThresholdExceededs("string")
                        .onFailures("string")
                        .onStarts("string")
                        .onStreamingBacklogExceededs("string")
                        .onSuccesses("string")
                        .build())
                    .environmentKey("string")
                    .existingClusterId("string")
                    .health(JobTaskForEachTaskTaskHealthArgs.builder()
                        .rules(JobTaskForEachTaskTaskHealthRuleArgs.builder()
                            .metric("string")
                            .op("string")
                            .value(0)
                            .build())
                        .build())
                    .jobClusterKey("string")
                    .libraries(JobTaskForEachTaskTaskLibraryArgs.builder()
                        .cran(JobTaskForEachTaskTaskLibraryCranArgs.builder()
                            .package_("string")
                            .repo("string")
                            .build())
                        .egg("string")
                        .jar("string")
                        .maven(JobTaskForEachTaskTaskLibraryMavenArgs.builder()
                            .coordinates("string")
                            .exclusions("string")
                            .repo("string")
                            .build())
                        .pypi(JobTaskForEachTaskTaskLibraryPypiArgs.builder()
                            .package_("string")
                            .repo("string")
                            .build())
                        .requirements("string")
                        .whl("string")
                        .build())
                    .maxRetries(0)
                    .webhookNotifications(JobTaskForEachTaskTaskWebhookNotificationsArgs.builder()
                        .onDurationWarningThresholdExceededs(JobTaskForEachTaskTaskWebhookNotificationsOnDurationWarningThresholdExceededArgs.builder()
                            .id("string")
                            .build())
                        .onFailures(JobTaskForEachTaskTaskWebhookNotificationsOnFailureArgs.builder()
                            .id("string")
                            .build())
                        .onStarts(JobTaskForEachTaskTaskWebhookNotificationsOnStartArgs.builder()
                            .id("string")
                            .build())
                        .onStreamingBacklogExceededs(JobTaskForEachTaskTaskWebhookNotificationsOnStreamingBacklogExceededArgs.builder()
                            .id("string")
                            .build())
                        .onSuccesses(JobTaskForEachTaskTaskWebhookNotificationsOnSuccessArgs.builder()
                            .id("string")
                            .build())
                        .build())
                    .dbtTask(JobTaskForEachTaskTaskDbtTaskArgs.builder()
                        .commands("string")
                        .catalog("string")
                        .profilesDirectory("string")
                        .projectDirectory("string")
                        .schema("string")
                        .source("string")
                        .warehouseId("string")
                        .build())
                    .retryOnTimeout(false)
                    .notificationSettings(JobTaskForEachTaskTaskNotificationSettingsArgs.builder()
                        .alertOnLastAttempt(false)
                        .noAlertForCanceledRuns(false)
                        .noAlertForSkippedRuns(false)
                        .build())
                    .pipelineTask(JobTaskForEachTaskTaskPipelineTaskArgs.builder()
                        .pipelineId("string")
                        .fullRefresh(false)
                        .build())
                    .pythonWheelTask(JobTaskForEachTaskTaskPythonWheelTaskArgs.builder()
                        .entryPoint("string")
                        .namedParameters(Map.of("string", "string"))
                        .packageName("string")
                        .parameters("string")
                        .build())
                    .notebookTask(JobTaskForEachTaskTaskNotebookTaskArgs.builder()
                        .notebookPath("string")
                        .baseParameters(Map.of("string", "string"))
                        .source("string")
                        .warehouseId("string")
                        .build())
                    .runIf("string")
                    .runJobTask(JobTaskForEachTaskTaskRunJobTaskArgs.builder()
                        .jobId(0)
                        .dbtCommands("string")
                        .jarParams("string")
                        .jobParameters(Map.of("string", "string"))
                        .notebookParams(Map.of("string", "string"))
                        .pipelineParams(JobTaskForEachTaskTaskRunJobTaskPipelineParamsArgs.builder()
                            .fullRefresh(false)
                            .build())
                        .pythonNamedParams(Map.of("string", "string"))
                        .pythonParams("string")
                        .sparkSubmitParams("string")
                        .sqlParams(Map.of("string", "string"))
                        .build())
                    .sparkJarTask(JobTaskForEachTaskTaskSparkJarTaskArgs.builder()
                        .jarUri("string")
                        .mainClassName("string")
                        .parameters("string")
                        .runAsRepl(false)
                        .build())
                    .sparkPythonTask(JobTaskForEachTaskTaskSparkPythonTaskArgs.builder()
                        .pythonFile("string")
                        .parameters("string")
                        .source("string")
                        .build())
                    .sparkSubmitTask(JobTaskForEachTaskTaskSparkSubmitTaskArgs.builder()
                        .parameters("string")
                        .build())
                    .sqlTask(JobTaskForEachTaskTaskSqlTaskArgs.builder()
                        .warehouseId("string")
                        .alert(JobTaskForEachTaskTaskSqlTaskAlertArgs.builder()
                            .alertId("string")
                            .pauseSubscriptions(false)
                            .subscriptions(JobTaskForEachTaskTaskSqlTaskAlertSubscriptionArgs.builder()
                                .destinationId("string")
                                .userName("string")
                                .build())
                            .build())
                        .dashboard(JobTaskForEachTaskTaskSqlTaskDashboardArgs.builder()
                            .dashboardId("string")
                            .customSubject("string")
                            .pauseSubscriptions(false)
                            .subscriptions(JobTaskForEachTaskTaskSqlTaskDashboardSubscriptionArgs.builder()
                                .destinationId("string")
                                .userName("string")
                                .build())
                            .build())
                        .file(JobTaskForEachTaskTaskSqlTaskFileArgs.builder()
                            .path("string")
                            .source("string")
                            .build())
                        .parameters(Map.of("string", "string"))
                        .query(JobTaskForEachTaskTaskSqlTaskQueryArgs.builder()
                            .queryId("string")
                            .build())
                        .build())
                    .cleanRoomsNotebookTask(JobTaskForEachTaskTaskCleanRoomsNotebookTaskArgs.builder()
                        .cleanRoomName("string")
                        .notebookName("string")
                        .etag("string")
                        .notebookBaseParameters(Map.of("string", "string"))
                        .build())
                    .timeoutSeconds(0)
                    .minRetryIntervalMillis(0)
                    .build())
                .concurrency(0)
                .build())
            .health(JobTaskHealthArgs.builder()
                .rules(JobTaskHealthRuleArgs.builder()
                    .metric("string")
                    .op("string")
                    .value(0)
                    .build())
                .build())
            .jobClusterKey("string")
            .notificationSettings(JobTaskNotificationSettingsArgs.builder()
                .alertOnLastAttempt(false)
                .noAlertForCanceledRuns(false)
                .noAlertForSkippedRuns(false)
                .build())
            .maxRetries(0)
            .cleanRoomsNotebookTask(JobTaskCleanRoomsNotebookTaskArgs.builder()
                .cleanRoomName("string")
                .notebookName("string")
                .etag("string")
                .notebookBaseParameters(Map.of("string", "string"))
                .build())
            .dependsOns(JobTaskDependsOnArgs.builder()
                .taskKey("string")
                .outcome("string")
                .build())
            .dbtTask(JobTaskDbtTaskArgs.builder()
                .commands("string")
                .catalog("string")
                .profilesDirectory("string")
                .projectDirectory("string")
                .schema("string")
                .source("string")
                .warehouseId("string")
                .build())
            .libraries(JobTaskLibraryArgs.builder()
                .cran(JobTaskLibraryCranArgs.builder()
                    .package_("string")
                    .repo("string")
                    .build())
                .egg("string")
                .jar("string")
                .maven(JobTaskLibraryMavenArgs.builder()
                    .coordinates("string")
                    .exclusions("string")
                    .repo("string")
                    .build())
                .pypi(JobTaskLibraryPypiArgs.builder()
                    .package_("string")
                    .repo("string")
                    .build())
                .requirements("string")
                .whl("string")
                .build())
            .pipelineTask(JobTaskPipelineTaskArgs.builder()
                .pipelineId("string")
                .fullRefresh(false)
                .build())
            .pythonWheelTask(JobTaskPythonWheelTaskArgs.builder()
                .entryPoint("string")
                .namedParameters(Map.of("string", "string"))
                .packageName("string")
                .parameters("string")
                .build())
            .retryOnTimeout(false)
            .runIf("string")
            .runJobTask(JobTaskRunJobTaskArgs.builder()
                .jobId(0)
                .dbtCommands("string")
                .jarParams("string")
                .jobParameters(Map.of("string", "string"))
                .notebookParams(Map.of("string", "string"))
                .pipelineParams(JobTaskRunJobTaskPipelineParamsArgs.builder()
                    .fullRefresh(false)
                    .build())
                .pythonNamedParams(Map.of("string", "string"))
                .pythonParams("string")
                .sparkSubmitParams("string")
                .sqlParams(Map.of("string", "string"))
                .build())
            .sparkJarTask(JobTaskSparkJarTaskArgs.builder()
                .jarUri("string")
                .mainClassName("string")
                .parameters("string")
                .runAsRepl(false)
                .build())
            .sparkPythonTask(JobTaskSparkPythonTaskArgs.builder()
                .pythonFile("string")
                .parameters("string")
                .source("string")
                .build())
            .sparkSubmitTask(JobTaskSparkSubmitTaskArgs.builder()
                .parameters("string")
                .build())
            .sqlTask(JobTaskSqlTaskArgs.builder()
                .warehouseId("string")
                .alert(JobTaskSqlTaskAlertArgs.builder()
                    .alertId("string")
                    .pauseSubscriptions(false)
                    .subscriptions(JobTaskSqlTaskAlertSubscriptionArgs.builder()
                        .destinationId("string")
                        .userName("string")
                        .build())
                    .build())
                .dashboard(JobTaskSqlTaskDashboardArgs.builder()
                    .dashboardId("string")
                    .customSubject("string")
                    .pauseSubscriptions(false)
                    .subscriptions(JobTaskSqlTaskDashboardSubscriptionArgs.builder()
                        .destinationId("string")
                        .userName("string")
                        .build())
                    .build())
                .file(JobTaskSqlTaskFileArgs.builder()
                    .path("string")
                    .source("string")
                    .build())
                .parameters(Map.of("string", "string"))
                .query(JobTaskSqlTaskQueryArgs.builder()
                    .queryId("string")
                    .build())
                .build())
            .conditionTask(JobTaskConditionTaskArgs.builder()
                .left("string")
                .op("string")
                .right("string")
                .build())
            .timeoutSeconds(0)
            .webhookNotifications(JobTaskWebhookNotificationsArgs.builder()
                .onDurationWarningThresholdExceededs(JobTaskWebhookNotificationsOnDurationWarningThresholdExceededArgs.builder()
                    .id("string")
                    .build())
                .onFailures(JobTaskWebhookNotificationsOnFailureArgs.builder()
                    .id("string")
                    .build())
                .onStarts(JobTaskWebhookNotificationsOnStartArgs.builder()
                    .id("string")
                    .build())
                .onStreamingBacklogExceededs(JobTaskWebhookNotificationsOnStreamingBacklogExceededArgs.builder()
                    .id("string")
                    .build())
                .onSuccesses(JobTaskWebhookNotificationsOnSuccessArgs.builder()
                    .id("string")
                    .build())
                .build())
            .build())
        .timeoutSeconds(0)
        .trigger(JobTriggerArgs.builder()
            .fileArrival(JobTriggerFileArrivalArgs.builder()
                .url("string")
                .minTimeBetweenTriggersSeconds(0)
                .waitAfterLastChangeSeconds(0)
                .build())
            .pauseStatus("string")
            .periodic(JobTriggerPeriodicArgs.builder()
                .interval(0)
                .unit("string")
                .build())
            .table(JobTriggerTableArgs.builder()
                .condition("string")
                .minTimeBetweenTriggersSeconds(0)
                .tableNames("string")
                .waitAfterLastChangeSeconds(0)
                .build())
            .tableUpdate(JobTriggerTableUpdateArgs.builder()
                .tableNames("string")
                .condition("string")
                .minTimeBetweenTriggersSeconds(0)
                .waitAfterLastChangeSeconds(0)
                .build())
            .build())
        .webhookNotifications(JobWebhookNotificationsArgs.builder()
            .onDurationWarningThresholdExceededs(JobWebhookNotificationsOnDurationWarningThresholdExceededArgs.builder()
                .id("string")
                .build())
            .onFailures(JobWebhookNotificationsOnFailureArgs.builder()
                .id("string")
                .build())
            .onStarts(JobWebhookNotificationsOnStartArgs.builder()
                .id("string")
                .build())
            .onStreamingBacklogExceededs(JobWebhookNotificationsOnStreamingBacklogExceededArgs.builder()
                .id("string")
                .build())
            .onSuccesses(JobWebhookNotificationsOnSuccessArgs.builder()
                .id("string")
                .build())
            .build())
        .build());
    
    job_resource = databricks.Job("jobResource",
        budget_policy_id="string",
        continuous={
            "pause_status": "string",
        },
        control_run_state=False,
        deployment={
            "kind": "string",
            "metadata_file_path": "string",
        },
        description="string",
        edit_mode="string",
        email_notifications={
            "no_alert_for_skipped_runs": False,
            "on_duration_warning_threshold_exceededs": ["string"],
            "on_failures": ["string"],
            "on_starts": ["string"],
            "on_streaming_backlog_exceededs": ["string"],
            "on_successes": ["string"],
        },
        environments=[{
            "environment_key": "string",
            "spec": {
                "client": "string",
                "dependencies": ["string"],
            },
        }],
        existing_cluster_id="string",
        format="string",
        git_source={
            "url": "string",
            "branch": "string",
            "commit": "string",
            "git_snapshot": {
                "used_commit": "string",
            },
            "job_source": {
                "import_from_git_branch": "string",
                "job_config_path": "string",
                "dirty_state": "string",
            },
            "provider": "string",
            "tag": "string",
        },
        health={
            "rules": [{
                "metric": "string",
                "op": "string",
                "value": 0,
            }],
        },
        job_clusters=[{
            "job_cluster_key": "string",
            "new_cluster": {
                "spark_version": "string",
                "idempotency_token": "string",
                "ssh_public_keys": ["string"],
                "azure_attributes": {
                    "availability": "string",
                    "first_on_demand": 0,
                    "log_analytics_info": {
                        "log_analytics_primary_key": "string",
                        "log_analytics_workspace_id": "string",
                    },
                    "spot_bid_max_price": 0,
                },
                "cluster_id": "string",
                "cluster_log_conf": {
                    "dbfs": {
                        "destination": "string",
                    },
                    "s3": {
                        "destination": "string",
                        "canned_acl": "string",
                        "enable_encryption": False,
                        "encryption_type": "string",
                        "endpoint": "string",
                        "kms_key": "string",
                        "region": "string",
                    },
                },
                "cluster_mount_infos": [{
                    "local_mount_dir_path": "string",
                    "network_filesystem_info": {
                        "server_address": "string",
                        "mount_options": "string",
                    },
                    "remote_mount_dir_path": "string",
                }],
                "init_scripts": [{
                    "abfss": {
                        "destination": "string",
                    },
                    "file": {
                        "destination": "string",
                    },
                    "gcs": {
                        "destination": "string",
                    },
                    "s3": {
                        "destination": "string",
                        "canned_acl": "string",
                        "enable_encryption": False,
                        "encryption_type": "string",
                        "endpoint": "string",
                        "kms_key": "string",
                        "region": "string",
                    },
                    "volumes": {
                        "destination": "string",
                    },
                    "workspace": {
                        "destination": "string",
                    },
                }],
                "custom_tags": {
                    "string": "string",
                },
                "data_security_mode": "string",
                "docker_image": {
                    "url": "string",
                    "basic_auth": {
                        "password": "string",
                        "username": "string",
                    },
                },
                "driver_instance_pool_id": "string",
                "driver_node_type_id": "string",
                "enable_elastic_disk": False,
                "enable_local_disk_encryption": False,
                "workload_type": {
                    "clients": {
                        "jobs": False,
                        "notebooks": False,
                    },
                },
                "aws_attributes": {
                    "availability": "string",
                    "ebs_volume_count": 0,
                    "ebs_volume_iops": 0,
                    "ebs_volume_size": 0,
                    "ebs_volume_throughput": 0,
                    "ebs_volume_type": "string",
                    "first_on_demand": 0,
                    "instance_profile_arn": "string",
                    "spot_bid_price_percent": 0,
                    "zone_id": "string",
                },
                "cluster_name": "string",
                "instance_pool_id": "string",
                "is_single_node": False,
                "kind": "string",
                "libraries": [{
                    "cran": {
                        "package": "string",
                        "repo": "string",
                    },
                    "egg": "string",
                    "jar": "string",
                    "maven": {
                        "coordinates": "string",
                        "exclusions": ["string"],
                        "repo": "string",
                    },
                    "pypi": {
                        "package": "string",
                        "repo": "string",
                    },
                    "requirements": "string",
                    "whl": "string",
                }],
                "node_type_id": "string",
                "num_workers": 0,
                "policy_id": "string",
                "runtime_engine": "string",
                "single_user_name": "string",
                "spark_conf": {
                    "string": "string",
                },
                "spark_env_vars": {
                    "string": "string",
                },
                "autoscale": {
                    "max_workers": 0,
                    "min_workers": 0,
                },
                "apply_policy_default_values": False,
                "use_ml_runtime": False,
                "gcp_attributes": {
                    "availability": "string",
                    "boot_disk_size": 0,
                    "google_service_account": "string",
                    "local_ssd_count": 0,
                    "use_preemptible_executors": False,
                    "zone_id": "string",
                },
            },
        }],
        libraries=[{
            "cran": {
                "package": "string",
                "repo": "string",
            },
            "egg": "string",
            "jar": "string",
            "maven": {
                "coordinates": "string",
                "exclusions": ["string"],
                "repo": "string",
            },
            "pypi": {
                "package": "string",
                "repo": "string",
            },
            "requirements": "string",
            "whl": "string",
        }],
        max_concurrent_runs=0,
        name="string",
        new_cluster={
            "spark_version": "string",
            "idempotency_token": "string",
            "ssh_public_keys": ["string"],
            "azure_attributes": {
                "availability": "string",
                "first_on_demand": 0,
                "log_analytics_info": {
                    "log_analytics_primary_key": "string",
                    "log_analytics_workspace_id": "string",
                },
                "spot_bid_max_price": 0,
            },
            "cluster_id": "string",
            "cluster_log_conf": {
                "dbfs": {
                    "destination": "string",
                },
                "s3": {
                    "destination": "string",
                    "canned_acl": "string",
                    "enable_encryption": False,
                    "encryption_type": "string",
                    "endpoint": "string",
                    "kms_key": "string",
                    "region": "string",
                },
            },
            "cluster_mount_infos": [{
                "local_mount_dir_path": "string",
                "network_filesystem_info": {
                    "server_address": "string",
                    "mount_options": "string",
                },
                "remote_mount_dir_path": "string",
            }],
            "init_scripts": [{
                "abfss": {
                    "destination": "string",
                },
                "file": {
                    "destination": "string",
                },
                "gcs": {
                    "destination": "string",
                },
                "s3": {
                    "destination": "string",
                    "canned_acl": "string",
                    "enable_encryption": False,
                    "encryption_type": "string",
                    "endpoint": "string",
                    "kms_key": "string",
                    "region": "string",
                },
                "volumes": {
                    "destination": "string",
                },
                "workspace": {
                    "destination": "string",
                },
            }],
            "custom_tags": {
                "string": "string",
            },
            "data_security_mode": "string",
            "docker_image": {
                "url": "string",
                "basic_auth": {
                    "password": "string",
                    "username": "string",
                },
            },
            "driver_instance_pool_id": "string",
            "driver_node_type_id": "string",
            "enable_elastic_disk": False,
            "enable_local_disk_encryption": False,
            "workload_type": {
                "clients": {
                    "jobs": False,
                    "notebooks": False,
                },
            },
            "aws_attributes": {
                "availability": "string",
                "ebs_volume_count": 0,
                "ebs_volume_iops": 0,
                "ebs_volume_size": 0,
                "ebs_volume_throughput": 0,
                "ebs_volume_type": "string",
                "first_on_demand": 0,
                "instance_profile_arn": "string",
                "spot_bid_price_percent": 0,
                "zone_id": "string",
            },
            "cluster_name": "string",
            "instance_pool_id": "string",
            "is_single_node": False,
            "kind": "string",
            "libraries": [{
                "cran": {
                    "package": "string",
                    "repo": "string",
                },
                "egg": "string",
                "jar": "string",
                "maven": {
                    "coordinates": "string",
                    "exclusions": ["string"],
                    "repo": "string",
                },
                "pypi": {
                    "package": "string",
                    "repo": "string",
                },
                "requirements": "string",
                "whl": "string",
            }],
            "node_type_id": "string",
            "num_workers": 0,
            "policy_id": "string",
            "runtime_engine": "string",
            "single_user_name": "string",
            "spark_conf": {
                "string": "string",
            },
            "spark_env_vars": {
                "string": "string",
            },
            "autoscale": {
                "max_workers": 0,
                "min_workers": 0,
            },
            "apply_policy_default_values": False,
            "use_ml_runtime": False,
            "gcp_attributes": {
                "availability": "string",
                "boot_disk_size": 0,
                "google_service_account": "string",
                "local_ssd_count": 0,
                "use_preemptible_executors": False,
                "zone_id": "string",
            },
        },
        notification_settings={
            "no_alert_for_canceled_runs": False,
            "no_alert_for_skipped_runs": False,
        },
        parameters=[{
            "default": "string",
            "name": "string",
        }],
        queue={
            "enabled": False,
        },
        run_as={
            "service_principal_name": "string",
            "user_name": "string",
        },
        schedule={
            "quartz_cron_expression": "string",
            "timezone_id": "string",
            "pause_status": "string",
        },
        tags={
            "string": "string",
        },
        tasks=[{
            "task_key": "string",
            "min_retry_interval_millis": 0,
            "disable_auto_optimization": False,
            "new_cluster": {
                "spark_version": "string",
                "idempotency_token": "string",
                "ssh_public_keys": ["string"],
                "azure_attributes": {
                    "availability": "string",
                    "first_on_demand": 0,
                    "log_analytics_info": {
                        "log_analytics_primary_key": "string",
                        "log_analytics_workspace_id": "string",
                    },
                    "spot_bid_max_price": 0,
                },
                "cluster_id": "string",
                "cluster_log_conf": {
                    "dbfs": {
                        "destination": "string",
                    },
                    "s3": {
                        "destination": "string",
                        "canned_acl": "string",
                        "enable_encryption": False,
                        "encryption_type": "string",
                        "endpoint": "string",
                        "kms_key": "string",
                        "region": "string",
                    },
                },
                "cluster_mount_infos": [{
                    "local_mount_dir_path": "string",
                    "network_filesystem_info": {
                        "server_address": "string",
                        "mount_options": "string",
                    },
                    "remote_mount_dir_path": "string",
                }],
                "init_scripts": [{
                    "abfss": {
                        "destination": "string",
                    },
                    "file": {
                        "destination": "string",
                    },
                    "gcs": {
                        "destination": "string",
                    },
                    "s3": {
                        "destination": "string",
                        "canned_acl": "string",
                        "enable_encryption": False,
                        "encryption_type": "string",
                        "endpoint": "string",
                        "kms_key": "string",
                        "region": "string",
                    },
                    "volumes": {
                        "destination": "string",
                    },
                    "workspace": {
                        "destination": "string",
                    },
                }],
                "custom_tags": {
                    "string": "string",
                },
                "data_security_mode": "string",
                "docker_image": {
                    "url": "string",
                    "basic_auth": {
                        "password": "string",
                        "username": "string",
                    },
                },
                "driver_instance_pool_id": "string",
                "driver_node_type_id": "string",
                "enable_elastic_disk": False,
                "enable_local_disk_encryption": False,
                "workload_type": {
                    "clients": {
                        "jobs": False,
                        "notebooks": False,
                    },
                },
                "aws_attributes": {
                    "availability": "string",
                    "ebs_volume_count": 0,
                    "ebs_volume_iops": 0,
                    "ebs_volume_size": 0,
                    "ebs_volume_throughput": 0,
                    "ebs_volume_type": "string",
                    "first_on_demand": 0,
                    "instance_profile_arn": "string",
                    "spot_bid_price_percent": 0,
                    "zone_id": "string",
                },
                "cluster_name": "string",
                "instance_pool_id": "string",
                "is_single_node": False,
                "kind": "string",
                "libraries": [{
                    "cran": {
                        "package": "string",
                        "repo": "string",
                    },
                    "egg": "string",
                    "jar": "string",
                    "maven": {
                        "coordinates": "string",
                        "exclusions": ["string"],
                        "repo": "string",
                    },
                    "pypi": {
                        "package": "string",
                        "repo": "string",
                    },
                    "requirements": "string",
                    "whl": "string",
                }],
                "node_type_id": "string",
                "num_workers": 0,
                "policy_id": "string",
                "runtime_engine": "string",
                "single_user_name": "string",
                "spark_conf": {
                    "string": "string",
                },
                "spark_env_vars": {
                    "string": "string",
                },
                "autoscale": {
                    "max_workers": 0,
                    "min_workers": 0,
                },
                "apply_policy_default_values": False,
                "use_ml_runtime": False,
                "gcp_attributes": {
                    "availability": "string",
                    "boot_disk_size": 0,
                    "google_service_account": "string",
                    "local_ssd_count": 0,
                    "use_preemptible_executors": False,
                    "zone_id": "string",
                },
            },
            "description": "string",
            "notebook_task": {
                "notebook_path": "string",
                "base_parameters": {
                    "string": "string",
                },
                "source": "string",
                "warehouse_id": "string",
            },
            "email_notifications": {
                "no_alert_for_skipped_runs": False,
                "on_duration_warning_threshold_exceededs": ["string"],
                "on_failures": ["string"],
                "on_starts": ["string"],
                "on_streaming_backlog_exceededs": ["string"],
                "on_successes": ["string"],
            },
            "environment_key": "string",
            "existing_cluster_id": "string",
            "for_each_task": {
                "inputs": "string",
                "task": {
                    "task_key": "string",
                    "new_cluster": {
                        "spark_version": "string",
                        "idempotency_token": "string",
                        "ssh_public_keys": ["string"],
                        "azure_attributes": {
                            "availability": "string",
                            "first_on_demand": 0,
                            "log_analytics_info": {
                                "log_analytics_primary_key": "string",
                                "log_analytics_workspace_id": "string",
                            },
                            "spot_bid_max_price": 0,
                        },
                        "cluster_id": "string",
                        "cluster_log_conf": {
                            "dbfs": {
                                "destination": "string",
                            },
                            "s3": {
                                "destination": "string",
                                "canned_acl": "string",
                                "enable_encryption": False,
                                "encryption_type": "string",
                                "endpoint": "string",
                                "kms_key": "string",
                                "region": "string",
                            },
                        },
                        "cluster_mount_infos": [{
                            "local_mount_dir_path": "string",
                            "network_filesystem_info": {
                                "server_address": "string",
                                "mount_options": "string",
                            },
                            "remote_mount_dir_path": "string",
                        }],
                        "init_scripts": [{
                            "abfss": {
                                "destination": "string",
                            },
                            "file": {
                                "destination": "string",
                            },
                            "gcs": {
                                "destination": "string",
                            },
                            "s3": {
                                "destination": "string",
                                "canned_acl": "string",
                                "enable_encryption": False,
                                "encryption_type": "string",
                                "endpoint": "string",
                                "kms_key": "string",
                                "region": "string",
                            },
                            "volumes": {
                                "destination": "string",
                            },
                            "workspace": {
                                "destination": "string",
                            },
                        }],
                        "custom_tags": {
                            "string": "string",
                        },
                        "data_security_mode": "string",
                        "docker_image": {
                            "url": "string",
                            "basic_auth": {
                                "password": "string",
                                "username": "string",
                            },
                        },
                        "driver_instance_pool_id": "string",
                        "driver_node_type_id": "string",
                        "enable_elastic_disk": False,
                        "enable_local_disk_encryption": False,
                        "workload_type": {
                            "clients": {
                                "jobs": False,
                                "notebooks": False,
                            },
                        },
                        "aws_attributes": {
                            "availability": "string",
                            "ebs_volume_count": 0,
                            "ebs_volume_iops": 0,
                            "ebs_volume_size": 0,
                            "ebs_volume_throughput": 0,
                            "ebs_volume_type": "string",
                            "first_on_demand": 0,
                            "instance_profile_arn": "string",
                            "spot_bid_price_percent": 0,
                            "zone_id": "string",
                        },
                        "cluster_name": "string",
                        "instance_pool_id": "string",
                        "is_single_node": False,
                        "kind": "string",
                        "libraries": [{
                            "cran": {
                                "package": "string",
                                "repo": "string",
                            },
                            "egg": "string",
                            "jar": "string",
                            "maven": {
                                "coordinates": "string",
                                "exclusions": ["string"],
                                "repo": "string",
                            },
                            "pypi": {
                                "package": "string",
                                "repo": "string",
                            },
                            "requirements": "string",
                            "whl": "string",
                        }],
                        "node_type_id": "string",
                        "num_workers": 0,
                        "policy_id": "string",
                        "runtime_engine": "string",
                        "single_user_name": "string",
                        "spark_conf": {
                            "string": "string",
                        },
                        "spark_env_vars": {
                            "string": "string",
                        },
                        "autoscale": {
                            "max_workers": 0,
                            "min_workers": 0,
                        },
                        "apply_policy_default_values": False,
                        "use_ml_runtime": False,
                        "gcp_attributes": {
                            "availability": "string",
                            "boot_disk_size": 0,
                            "google_service_account": "string",
                            "local_ssd_count": 0,
                            "use_preemptible_executors": False,
                            "zone_id": "string",
                        },
                    },
                    "condition_task": {
                        "left": "string",
                        "op": "string",
                        "right": "string",
                    },
                    "depends_ons": [{
                        "task_key": "string",
                        "outcome": "string",
                    }],
                    "description": "string",
                    "disable_auto_optimization": False,
                    "email_notifications": {
                        "no_alert_for_skipped_runs": False,
                        "on_duration_warning_threshold_exceededs": ["string"],
                        "on_failures": ["string"],
                        "on_starts": ["string"],
                        "on_streaming_backlog_exceededs": ["string"],
                        "on_successes": ["string"],
                    },
                    "environment_key": "string",
                    "existing_cluster_id": "string",
                    "health": {
                        "rules": [{
                            "metric": "string",
                            "op": "string",
                            "value": 0,
                        }],
                    },
                    "job_cluster_key": "string",
                    "libraries": [{
                        "cran": {
                            "package": "string",
                            "repo": "string",
                        },
                        "egg": "string",
                        "jar": "string",
                        "maven": {
                            "coordinates": "string",
                            "exclusions": ["string"],
                            "repo": "string",
                        },
                        "pypi": {
                            "package": "string",
                            "repo": "string",
                        },
                        "requirements": "string",
                        "whl": "string",
                    }],
                    "max_retries": 0,
                    "webhook_notifications": {
                        "on_duration_warning_threshold_exceededs": [{
                            "id": "string",
                        }],
                        "on_failures": [{
                            "id": "string",
                        }],
                        "on_starts": [{
                            "id": "string",
                        }],
                        "on_streaming_backlog_exceededs": [{
                            "id": "string",
                        }],
                        "on_successes": [{
                            "id": "string",
                        }],
                    },
                    "dbt_task": {
                        "commands": ["string"],
                        "catalog": "string",
                        "profiles_directory": "string",
                        "project_directory": "string",
                        "schema": "string",
                        "source": "string",
                        "warehouse_id": "string",
                    },
                    "retry_on_timeout": False,
                    "notification_settings": {
                        "alert_on_last_attempt": False,
                        "no_alert_for_canceled_runs": False,
                        "no_alert_for_skipped_runs": False,
                    },
                    "pipeline_task": {
                        "pipeline_id": "string",
                        "full_refresh": False,
                    },
                    "python_wheel_task": {
                        "entry_point": "string",
                        "named_parameters": {
                            "string": "string",
                        },
                        "package_name": "string",
                        "parameters": ["string"],
                    },
                    "notebook_task": {
                        "notebook_path": "string",
                        "base_parameters": {
                            "string": "string",
                        },
                        "source": "string",
                        "warehouse_id": "string",
                    },
                    "run_if": "string",
                    "run_job_task": {
                        "job_id": 0,
                        "dbt_commands": ["string"],
                        "jar_params": ["string"],
                        "job_parameters": {
                            "string": "string",
                        },
                        "notebook_params": {
                            "string": "string",
                        },
                        "pipeline_params": {
                            "full_refresh": False,
                        },
                        "python_named_params": {
                            "string": "string",
                        },
                        "python_params": ["string"],
                        "spark_submit_params": ["string"],
                        "sql_params": {
                            "string": "string",
                        },
                    },
                    "spark_jar_task": {
                        "jar_uri": "string",
                        "main_class_name": "string",
                        "parameters": ["string"],
                        "run_as_repl": False,
                    },
                    "spark_python_task": {
                        "python_file": "string",
                        "parameters": ["string"],
                        "source": "string",
                    },
                    "spark_submit_task": {
                        "parameters": ["string"],
                    },
                    "sql_task": {
                        "warehouse_id": "string",
                        "alert": {
                            "alert_id": "string",
                            "pause_subscriptions": False,
                            "subscriptions": [{
                                "destination_id": "string",
                                "user_name": "string",
                            }],
                        },
                        "dashboard": {
                            "dashboard_id": "string",
                            "custom_subject": "string",
                            "pause_subscriptions": False,
                            "subscriptions": [{
                                "destination_id": "string",
                                "user_name": "string",
                            }],
                        },
                        "file": {
                            "path": "string",
                            "source": "string",
                        },
                        "parameters": {
                            "string": "string",
                        },
                        "query": {
                            "query_id": "string",
                        },
                    },
                    "clean_rooms_notebook_task": {
                        "clean_room_name": "string",
                        "notebook_name": "string",
                        "etag": "string",
                        "notebook_base_parameters": {
                            "string": "string",
                        },
                    },
                    "timeout_seconds": 0,
                    "min_retry_interval_millis": 0,
                },
                "concurrency": 0,
            },
            "health": {
                "rules": [{
                    "metric": "string",
                    "op": "string",
                    "value": 0,
                }],
            },
            "job_cluster_key": "string",
            "notification_settings": {
                "alert_on_last_attempt": False,
                "no_alert_for_canceled_runs": False,
                "no_alert_for_skipped_runs": False,
            },
            "max_retries": 0,
            "clean_rooms_notebook_task": {
                "clean_room_name": "string",
                "notebook_name": "string",
                "etag": "string",
                "notebook_base_parameters": {
                    "string": "string",
                },
            },
            "depends_ons": [{
                "task_key": "string",
                "outcome": "string",
            }],
            "dbt_task": {
                "commands": ["string"],
                "catalog": "string",
                "profiles_directory": "string",
                "project_directory": "string",
                "schema": "string",
                "source": "string",
                "warehouse_id": "string",
            },
            "libraries": [{
                "cran": {
                    "package": "string",
                    "repo": "string",
                },
                "egg": "string",
                "jar": "string",
                "maven": {
                    "coordinates": "string",
                    "exclusions": ["string"],
                    "repo": "string",
                },
                "pypi": {
                    "package": "string",
                    "repo": "string",
                },
                "requirements": "string",
                "whl": "string",
            }],
            "pipeline_task": {
                "pipeline_id": "string",
                "full_refresh": False,
            },
            "python_wheel_task": {
                "entry_point": "string",
                "named_parameters": {
                    "string": "string",
                },
                "package_name": "string",
                "parameters": ["string"],
            },
            "retry_on_timeout": False,
            "run_if": "string",
            "run_job_task": {
                "job_id": 0,
                "dbt_commands": ["string"],
                "jar_params": ["string"],
                "job_parameters": {
                    "string": "string",
                },
                "notebook_params": {
                    "string": "string",
                },
                "pipeline_params": {
                    "full_refresh": False,
                },
                "python_named_params": {
                    "string": "string",
                },
                "python_params": ["string"],
                "spark_submit_params": ["string"],
                "sql_params": {
                    "string": "string",
                },
            },
            "spark_jar_task": {
                "jar_uri": "string",
                "main_class_name": "string",
                "parameters": ["string"],
                "run_as_repl": False,
            },
            "spark_python_task": {
                "python_file": "string",
                "parameters": ["string"],
                "source": "string",
            },
            "spark_submit_task": {
                "parameters": ["string"],
            },
            "sql_task": {
                "warehouse_id": "string",
                "alert": {
                    "alert_id": "string",
                    "pause_subscriptions": False,
                    "subscriptions": [{
                        "destination_id": "string",
                        "user_name": "string",
                    }],
                },
                "dashboard": {
                    "dashboard_id": "string",
                    "custom_subject": "string",
                    "pause_subscriptions": False,
                    "subscriptions": [{
                        "destination_id": "string",
                        "user_name": "string",
                    }],
                },
                "file": {
                    "path": "string",
                    "source": "string",
                },
                "parameters": {
                    "string": "string",
                },
                "query": {
                    "query_id": "string",
                },
            },
            "condition_task": {
                "left": "string",
                "op": "string",
                "right": "string",
            },
            "timeout_seconds": 0,
            "webhook_notifications": {
                "on_duration_warning_threshold_exceededs": [{
                    "id": "string",
                }],
                "on_failures": [{
                    "id": "string",
                }],
                "on_starts": [{
                    "id": "string",
                }],
                "on_streaming_backlog_exceededs": [{
                    "id": "string",
                }],
                "on_successes": [{
                    "id": "string",
                }],
            },
        }],
        timeout_seconds=0,
        trigger={
            "file_arrival": {
                "url": "string",
                "min_time_between_triggers_seconds": 0,
                "wait_after_last_change_seconds": 0,
            },
            "pause_status": "string",
            "periodic": {
                "interval": 0,
                "unit": "string",
            },
            "table": {
                "condition": "string",
                "min_time_between_triggers_seconds": 0,
                "table_names": ["string"],
                "wait_after_last_change_seconds": 0,
            },
            "table_update": {
                "table_names": ["string"],
                "condition": "string",
                "min_time_between_triggers_seconds": 0,
                "wait_after_last_change_seconds": 0,
            },
        },
        webhook_notifications={
            "on_duration_warning_threshold_exceededs": [{
                "id": "string",
            }],
            "on_failures": [{
                "id": "string",
            }],
            "on_starts": [{
                "id": "string",
            }],
            "on_streaming_backlog_exceededs": [{
                "id": "string",
            }],
            "on_successes": [{
                "id": "string",
            }],
        })
    
    const jobResource = new databricks.Job("jobResource", {
        budgetPolicyId: "string",
        continuous: {
            pauseStatus: "string",
        },
        controlRunState: false,
        deployment: {
            kind: "string",
            metadataFilePath: "string",
        },
        description: "string",
        editMode: "string",
        emailNotifications: {
            noAlertForSkippedRuns: false,
            onDurationWarningThresholdExceededs: ["string"],
            onFailures: ["string"],
            onStarts: ["string"],
            onStreamingBacklogExceededs: ["string"],
            onSuccesses: ["string"],
        },
        environments: [{
            environmentKey: "string",
            spec: {
                client: "string",
                dependencies: ["string"],
            },
        }],
        existingClusterId: "string",
        format: "string",
        gitSource: {
            url: "string",
            branch: "string",
            commit: "string",
            gitSnapshot: {
                usedCommit: "string",
            },
            jobSource: {
                importFromGitBranch: "string",
                jobConfigPath: "string",
                dirtyState: "string",
            },
            provider: "string",
            tag: "string",
        },
        health: {
            rules: [{
                metric: "string",
                op: "string",
                value: 0,
            }],
        },
        jobClusters: [{
            jobClusterKey: "string",
            newCluster: {
                sparkVersion: "string",
                idempotencyToken: "string",
                sshPublicKeys: ["string"],
                azureAttributes: {
                    availability: "string",
                    firstOnDemand: 0,
                    logAnalyticsInfo: {
                        logAnalyticsPrimaryKey: "string",
                        logAnalyticsWorkspaceId: "string",
                    },
                    spotBidMaxPrice: 0,
                },
                clusterId: "string",
                clusterLogConf: {
                    dbfs: {
                        destination: "string",
                    },
                    s3: {
                        destination: "string",
                        cannedAcl: "string",
                        enableEncryption: false,
                        encryptionType: "string",
                        endpoint: "string",
                        kmsKey: "string",
                        region: "string",
                    },
                },
                clusterMountInfos: [{
                    localMountDirPath: "string",
                    networkFilesystemInfo: {
                        serverAddress: "string",
                        mountOptions: "string",
                    },
                    remoteMountDirPath: "string",
                }],
                initScripts: [{
                    abfss: {
                        destination: "string",
                    },
                    file: {
                        destination: "string",
                    },
                    gcs: {
                        destination: "string",
                    },
                    s3: {
                        destination: "string",
                        cannedAcl: "string",
                        enableEncryption: false,
                        encryptionType: "string",
                        endpoint: "string",
                        kmsKey: "string",
                        region: "string",
                    },
                    volumes: {
                        destination: "string",
                    },
                    workspace: {
                        destination: "string",
                    },
                }],
                customTags: {
                    string: "string",
                },
                dataSecurityMode: "string",
                dockerImage: {
                    url: "string",
                    basicAuth: {
                        password: "string",
                        username: "string",
                    },
                },
                driverInstancePoolId: "string",
                driverNodeTypeId: "string",
                enableElasticDisk: false,
                enableLocalDiskEncryption: false,
                workloadType: