1. Packages
  2. Google Cloud (GCP) Classic
  3. API Docs
  4. dataloss
  5. PreventionJobTrigger
Google Cloud Classic v7.16.0 published on Wednesday, Mar 27, 2024 by Pulumi

gcp.dataloss.PreventionJobTrigger

Explore with Pulumi AI

gcp logo
Google Cloud Classic v7.16.0 published on Wednesday, Mar 27, 2024 by Pulumi

    A job trigger configuration.

    To get more information about JobTrigger, see:

    * [API documentation](https://cloud.google.com/dlp/docs/reference/rest/v2/projects.jobTriggers)
    * How-to Guides
        * [Official Documentation](https://cloud.google.com/dlp/docs/creating-job-triggers)

    Example Usage

    Dlp Job Trigger Basic

    import * as pulumi from "@pulumi/pulumi";
    import * as gcp from "@pulumi/gcp";
    
    const basic = new gcp.dataloss.PreventionJobTrigger("basic", {
        parent: "projects/my-project-name",
        description: "Description",
        displayName: "Displayname",
        triggers: [{
            schedule: {
                recurrencePeriodDuration: "86400s",
            },
        }],
        inspectJob: {
            inspectTemplateName: "fake",
            actions: [{
                saveFindings: {
                    outputConfig: {
                        table: {
                            projectId: "project",
                            datasetId: "dataset",
                        },
                    },
                },
            }],
            storageConfig: {
                cloudStorageOptions: {
                    fileSet: {
                        url: "gs://mybucket/directory/",
                    },
                },
            },
        },
    });
    
    import pulumi
    import pulumi_gcp as gcp
    
    basic = gcp.dataloss.PreventionJobTrigger("basic",
        parent="projects/my-project-name",
        description="Description",
        display_name="Displayname",
        triggers=[gcp.dataloss.PreventionJobTriggerTriggerArgs(
            schedule=gcp.dataloss.PreventionJobTriggerTriggerScheduleArgs(
                recurrence_period_duration="86400s",
            ),
        )],
        inspect_job=gcp.dataloss.PreventionJobTriggerInspectJobArgs(
            inspect_template_name="fake",
            actions=[gcp.dataloss.PreventionJobTriggerInspectJobActionArgs(
                save_findings=gcp.dataloss.PreventionJobTriggerInspectJobActionSaveFindingsArgs(
                    output_config=gcp.dataloss.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs(
                        table=gcp.dataloss.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs(
                            project_id="project",
                            dataset_id="dataset",
                        ),
                    ),
                ),
            )],
            storage_config=gcp.dataloss.PreventionJobTriggerInspectJobStorageConfigArgs(
                cloud_storage_options=gcp.dataloss.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs(
                    file_set=gcp.dataloss.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs(
                        url="gs://mybucket/directory/",
                    ),
                ),
            ),
        ))
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/dataloss"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		_, err := dataloss.NewPreventionJobTrigger(ctx, "basic", &dataloss.PreventionJobTriggerArgs{
    			Parent:      pulumi.String("projects/my-project-name"),
    			Description: pulumi.String("Description"),
    			DisplayName: pulumi.String("Displayname"),
    			Triggers: dataloss.PreventionJobTriggerTriggerArray{
    				&dataloss.PreventionJobTriggerTriggerArgs{
    					Schedule: &dataloss.PreventionJobTriggerTriggerScheduleArgs{
    						RecurrencePeriodDuration: pulumi.String("86400s"),
    					},
    				},
    			},
    			InspectJob: &dataloss.PreventionJobTriggerInspectJobArgs{
    				InspectTemplateName: pulumi.String("fake"),
    				Actions: dataloss.PreventionJobTriggerInspectJobActionArray{
    					&dataloss.PreventionJobTriggerInspectJobActionArgs{
    						SaveFindings: &dataloss.PreventionJobTriggerInspectJobActionSaveFindingsArgs{
    							OutputConfig: &dataloss.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs{
    								Table: &dataloss.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs{
    									ProjectId: pulumi.String("project"),
    									DatasetId: pulumi.String("dataset"),
    								},
    							},
    						},
    					},
    				},
    				StorageConfig: &dataloss.PreventionJobTriggerInspectJobStorageConfigArgs{
    					CloudStorageOptions: &dataloss.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs{
    						FileSet: &dataloss.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs{
    							Url: pulumi.String("gs://mybucket/directory/"),
    						},
    					},
    				},
    			},
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Gcp = Pulumi.Gcp;
    
    return await Deployment.RunAsync(() => 
    {
        var basic = new Gcp.DataLoss.PreventionJobTrigger("basic", new()
        {
            Parent = "projects/my-project-name",
            Description = "Description",
            DisplayName = "Displayname",
            Triggers = new[]
            {
                new Gcp.DataLoss.Inputs.PreventionJobTriggerTriggerArgs
                {
                    Schedule = new Gcp.DataLoss.Inputs.PreventionJobTriggerTriggerScheduleArgs
                    {
                        RecurrencePeriodDuration = "86400s",
                    },
                },
            },
            InspectJob = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobArgs
            {
                InspectTemplateName = "fake",
                Actions = new[]
                {
                    new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionArgs
                    {
                        SaveFindings = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionSaveFindingsArgs
                        {
                            OutputConfig = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs
                            {
                                Table = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs
                                {
                                    ProjectId = "project",
                                    DatasetId = "dataset",
                                },
                            },
                        },
                    },
                },
                StorageConfig = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobStorageConfigArgs
                {
                    CloudStorageOptions = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs
                    {
                        FileSet = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs
                        {
                            Url = "gs://mybucket/directory/",
                        },
                    },
                },
            },
        });
    
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.gcp.dataloss.PreventionJobTrigger;
    import com.pulumi.gcp.dataloss.PreventionJobTriggerArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerTriggerArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerTriggerScheduleArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var basic = new PreventionJobTrigger("basic", PreventionJobTriggerArgs.builder()        
                .parent("projects/my-project-name")
                .description("Description")
                .displayName("Displayname")
                .triggers(PreventionJobTriggerTriggerArgs.builder()
                    .schedule(PreventionJobTriggerTriggerScheduleArgs.builder()
                        .recurrencePeriodDuration("86400s")
                        .build())
                    .build())
                .inspectJob(PreventionJobTriggerInspectJobArgs.builder()
                    .inspectTemplateName("fake")
                    .actions(PreventionJobTriggerInspectJobActionArgs.builder()
                        .saveFindings(PreventionJobTriggerInspectJobActionSaveFindingsArgs.builder()
                            .outputConfig(PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs.builder()
                                .table(PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs.builder()
                                    .projectId("project")
                                    .datasetId("dataset")
                                    .build())
                                .build())
                            .build())
                        .build())
                    .storageConfig(PreventionJobTriggerInspectJobStorageConfigArgs.builder()
                        .cloudStorageOptions(PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs.builder()
                            .fileSet(PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs.builder()
                                .url("gs://mybucket/directory/")
                                .build())
                            .build())
                        .build())
                    .build())
                .build());
    
        }
    }
    
    resources:
      basic:
        type: gcp:dataloss:PreventionJobTrigger
        properties:
          parent: projects/my-project-name
          description: Description
          displayName: Displayname
          triggers:
            - schedule:
                recurrencePeriodDuration: 86400s
          inspectJob:
            inspectTemplateName: fake
            actions:
              - saveFindings:
                  outputConfig:
                    table:
                      projectId: project
                      datasetId: dataset
            storageConfig:
              cloudStorageOptions:
                fileSet:
                  url: gs://mybucket/directory/
    

    Dlp Job Trigger Bigquery Row Limit

    import * as pulumi from "@pulumi/pulumi";
    import * as gcp from "@pulumi/gcp";
    
    const bigqueryRowLimit = new gcp.dataloss.PreventionJobTrigger("bigquery_row_limit", {
        parent: "projects/my-project-name",
        description: "Description",
        displayName: "Displayname",
        triggers: [{
            schedule: {
                recurrencePeriodDuration: "86400s",
            },
        }],
        inspectJob: {
            inspectTemplateName: "fake",
            actions: [{
                saveFindings: {
                    outputConfig: {
                        table: {
                            projectId: "project",
                            datasetId: "dataset",
                        },
                    },
                },
            }],
            storageConfig: {
                bigQueryOptions: {
                    tableReference: {
                        projectId: "project",
                        datasetId: "dataset",
                        tableId: "table_to_scan",
                    },
                    rowsLimit: 1000,
                    sampleMethod: "RANDOM_START",
                },
            },
        },
    });
    
    import pulumi
    import pulumi_gcp as gcp
    
    bigquery_row_limit = gcp.dataloss.PreventionJobTrigger("bigquery_row_limit",
        parent="projects/my-project-name",
        description="Description",
        display_name="Displayname",
        triggers=[gcp.dataloss.PreventionJobTriggerTriggerArgs(
            schedule=gcp.dataloss.PreventionJobTriggerTriggerScheduleArgs(
                recurrence_period_duration="86400s",
            ),
        )],
        inspect_job=gcp.dataloss.PreventionJobTriggerInspectJobArgs(
            inspect_template_name="fake",
            actions=[gcp.dataloss.PreventionJobTriggerInspectJobActionArgs(
                save_findings=gcp.dataloss.PreventionJobTriggerInspectJobActionSaveFindingsArgs(
                    output_config=gcp.dataloss.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs(
                        table=gcp.dataloss.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs(
                            project_id="project",
                            dataset_id="dataset",
                        ),
                    ),
                ),
            )],
            storage_config=gcp.dataloss.PreventionJobTriggerInspectJobStorageConfigArgs(
                big_query_options=gcp.dataloss.PreventionJobTriggerInspectJobStorageConfigBigQueryOptionsArgs(
                    table_reference=gcp.dataloss.PreventionJobTriggerInspectJobStorageConfigBigQueryOptionsTableReferenceArgs(
                        project_id="project",
                        dataset_id="dataset",
                        table_id="table_to_scan",
                    ),
                    rows_limit=1000,
                    sample_method="RANDOM_START",
                ),
            ),
        ))
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/dataloss"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		_, err := dataloss.NewPreventionJobTrigger(ctx, "bigquery_row_limit", &dataloss.PreventionJobTriggerArgs{
    			Parent:      pulumi.String("projects/my-project-name"),
    			Description: pulumi.String("Description"),
    			DisplayName: pulumi.String("Displayname"),
    			Triggers: dataloss.PreventionJobTriggerTriggerArray{
    				&dataloss.PreventionJobTriggerTriggerArgs{
    					Schedule: &dataloss.PreventionJobTriggerTriggerScheduleArgs{
    						RecurrencePeriodDuration: pulumi.String("86400s"),
    					},
    				},
    			},
    			InspectJob: &dataloss.PreventionJobTriggerInspectJobArgs{
    				InspectTemplateName: pulumi.String("fake"),
    				Actions: dataloss.PreventionJobTriggerInspectJobActionArray{
    					&dataloss.PreventionJobTriggerInspectJobActionArgs{
    						SaveFindings: &dataloss.PreventionJobTriggerInspectJobActionSaveFindingsArgs{
    							OutputConfig: &dataloss.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs{
    								Table: &dataloss.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs{
    									ProjectId: pulumi.String("project"),
    									DatasetId: pulumi.String("dataset"),
    								},
    							},
    						},
    					},
    				},
    				StorageConfig: &dataloss.PreventionJobTriggerInspectJobStorageConfigArgs{
    					BigQueryOptions: &dataloss.PreventionJobTriggerInspectJobStorageConfigBigQueryOptionsArgs{
    						TableReference: &dataloss.PreventionJobTriggerInspectJobStorageConfigBigQueryOptionsTableReferenceArgs{
    							ProjectId: pulumi.String("project"),
    							DatasetId: pulumi.String("dataset"),
    							TableId:   pulumi.String("table_to_scan"),
    						},
    						RowsLimit:    pulumi.Int(1000),
    						SampleMethod: pulumi.String("RANDOM_START"),
    					},
    				},
    			},
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Gcp = Pulumi.Gcp;
    
    return await Deployment.RunAsync(() => 
    {
        var bigqueryRowLimit = new Gcp.DataLoss.PreventionJobTrigger("bigquery_row_limit", new()
        {
            Parent = "projects/my-project-name",
            Description = "Description",
            DisplayName = "Displayname",
            Triggers = new[]
            {
                new Gcp.DataLoss.Inputs.PreventionJobTriggerTriggerArgs
                {
                    Schedule = new Gcp.DataLoss.Inputs.PreventionJobTriggerTriggerScheduleArgs
                    {
                        RecurrencePeriodDuration = "86400s",
                    },
                },
            },
            InspectJob = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobArgs
            {
                InspectTemplateName = "fake",
                Actions = new[]
                {
                    new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionArgs
                    {
                        SaveFindings = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionSaveFindingsArgs
                        {
                            OutputConfig = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs
                            {
                                Table = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs
                                {
                                    ProjectId = "project",
                                    DatasetId = "dataset",
                                },
                            },
                        },
                    },
                },
                StorageConfig = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobStorageConfigArgs
                {
                    BigQueryOptions = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobStorageConfigBigQueryOptionsArgs
                    {
                        TableReference = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobStorageConfigBigQueryOptionsTableReferenceArgs
                        {
                            ProjectId = "project",
                            DatasetId = "dataset",
                            TableId = "table_to_scan",
                        },
                        RowsLimit = 1000,
                        SampleMethod = "RANDOM_START",
                    },
                },
            },
        });
    
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.gcp.dataloss.PreventionJobTrigger;
    import com.pulumi.gcp.dataloss.PreventionJobTriggerArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerTriggerArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerTriggerScheduleArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigBigQueryOptionsArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigBigQueryOptionsTableReferenceArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var bigqueryRowLimit = new PreventionJobTrigger("bigqueryRowLimit", PreventionJobTriggerArgs.builder()        
                .parent("projects/my-project-name")
                .description("Description")
                .displayName("Displayname")
                .triggers(PreventionJobTriggerTriggerArgs.builder()
                    .schedule(PreventionJobTriggerTriggerScheduleArgs.builder()
                        .recurrencePeriodDuration("86400s")
                        .build())
                    .build())
                .inspectJob(PreventionJobTriggerInspectJobArgs.builder()
                    .inspectTemplateName("fake")
                    .actions(PreventionJobTriggerInspectJobActionArgs.builder()
                        .saveFindings(PreventionJobTriggerInspectJobActionSaveFindingsArgs.builder()
                            .outputConfig(PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs.builder()
                                .table(PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs.builder()
                                    .projectId("project")
                                    .datasetId("dataset")
                                    .build())
                                .build())
                            .build())
                        .build())
                    .storageConfig(PreventionJobTriggerInspectJobStorageConfigArgs.builder()
                        .bigQueryOptions(PreventionJobTriggerInspectJobStorageConfigBigQueryOptionsArgs.builder()
                            .tableReference(PreventionJobTriggerInspectJobStorageConfigBigQueryOptionsTableReferenceArgs.builder()
                                .projectId("project")
                                .datasetId("dataset")
                                .tableId("table_to_scan")
                                .build())
                            .rowsLimit(1000)
                            .sampleMethod("RANDOM_START")
                            .build())
                        .build())
                    .build())
                .build());
    
        }
    }
    
    resources:
      bigqueryRowLimit:
        type: gcp:dataloss:PreventionJobTrigger
        name: bigquery_row_limit
        properties:
          parent: projects/my-project-name
          description: Description
          displayName: Displayname
          triggers:
            - schedule:
                recurrencePeriodDuration: 86400s
          inspectJob:
            inspectTemplateName: fake
            actions:
              - saveFindings:
                  outputConfig:
                    table:
                      projectId: project
                      datasetId: dataset
            storageConfig:
              bigQueryOptions:
                tableReference:
                  projectId: project
                  datasetId: dataset
                  tableId: table_to_scan
                rowsLimit: 1000
                sampleMethod: RANDOM_START
    

    Dlp Job Trigger Bigquery Row Limit Percentage

    import * as pulumi from "@pulumi/pulumi";
    import * as gcp from "@pulumi/gcp";
    
    const bigqueryRowLimitPercentage = new gcp.dataloss.PreventionJobTrigger("bigquery_row_limit_percentage", {
        parent: "projects/my-project-name",
        description: "Description",
        displayName: "Displayname",
        triggers: [{
            schedule: {
                recurrencePeriodDuration: "86400s",
            },
        }],
        inspectJob: {
            inspectTemplateName: "fake",
            actions: [{
                saveFindings: {
                    outputConfig: {
                        table: {
                            projectId: "project",
                            datasetId: "dataset",
                        },
                    },
                },
            }],
            storageConfig: {
                bigQueryOptions: {
                    tableReference: {
                        projectId: "project",
                        datasetId: "dataset",
                        tableId: "table_to_scan",
                    },
                    rowsLimitPercent: 50,
                    sampleMethod: "RANDOM_START",
                },
            },
        },
    });
    
    import pulumi
    import pulumi_gcp as gcp
    
    bigquery_row_limit_percentage = gcp.dataloss.PreventionJobTrigger("bigquery_row_limit_percentage",
        parent="projects/my-project-name",
        description="Description",
        display_name="Displayname",
        triggers=[gcp.dataloss.PreventionJobTriggerTriggerArgs(
            schedule=gcp.dataloss.PreventionJobTriggerTriggerScheduleArgs(
                recurrence_period_duration="86400s",
            ),
        )],
        inspect_job=gcp.dataloss.PreventionJobTriggerInspectJobArgs(
            inspect_template_name="fake",
            actions=[gcp.dataloss.PreventionJobTriggerInspectJobActionArgs(
                save_findings=gcp.dataloss.PreventionJobTriggerInspectJobActionSaveFindingsArgs(
                    output_config=gcp.dataloss.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs(
                        table=gcp.dataloss.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs(
                            project_id="project",
                            dataset_id="dataset",
                        ),
                    ),
                ),
            )],
            storage_config=gcp.dataloss.PreventionJobTriggerInspectJobStorageConfigArgs(
                big_query_options=gcp.dataloss.PreventionJobTriggerInspectJobStorageConfigBigQueryOptionsArgs(
                    table_reference=gcp.dataloss.PreventionJobTriggerInspectJobStorageConfigBigQueryOptionsTableReferenceArgs(
                        project_id="project",
                        dataset_id="dataset",
                        table_id="table_to_scan",
                    ),
                    rows_limit_percent=50,
                    sample_method="RANDOM_START",
                ),
            ),
        ))
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/dataloss"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		_, err := dataloss.NewPreventionJobTrigger(ctx, "bigquery_row_limit_percentage", &dataloss.PreventionJobTriggerArgs{
    			Parent:      pulumi.String("projects/my-project-name"),
    			Description: pulumi.String("Description"),
    			DisplayName: pulumi.String("Displayname"),
    			Triggers: dataloss.PreventionJobTriggerTriggerArray{
    				&dataloss.PreventionJobTriggerTriggerArgs{
    					Schedule: &dataloss.PreventionJobTriggerTriggerScheduleArgs{
    						RecurrencePeriodDuration: pulumi.String("86400s"),
    					},
    				},
    			},
    			InspectJob: &dataloss.PreventionJobTriggerInspectJobArgs{
    				InspectTemplateName: pulumi.String("fake"),
    				Actions: dataloss.PreventionJobTriggerInspectJobActionArray{
    					&dataloss.PreventionJobTriggerInspectJobActionArgs{
    						SaveFindings: &dataloss.PreventionJobTriggerInspectJobActionSaveFindingsArgs{
    							OutputConfig: &dataloss.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs{
    								Table: &dataloss.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs{
    									ProjectId: pulumi.String("project"),
    									DatasetId: pulumi.String("dataset"),
    								},
    							},
    						},
    					},
    				},
    				StorageConfig: &dataloss.PreventionJobTriggerInspectJobStorageConfigArgs{
    					BigQueryOptions: &dataloss.PreventionJobTriggerInspectJobStorageConfigBigQueryOptionsArgs{
    						TableReference: &dataloss.PreventionJobTriggerInspectJobStorageConfigBigQueryOptionsTableReferenceArgs{
    							ProjectId: pulumi.String("project"),
    							DatasetId: pulumi.String("dataset"),
    							TableId:   pulumi.String("table_to_scan"),
    						},
    						RowsLimitPercent: pulumi.Int(50),
    						SampleMethod:     pulumi.String("RANDOM_START"),
    					},
    				},
    			},
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Gcp = Pulumi.Gcp;
    
    return await Deployment.RunAsync(() => 
    {
        var bigqueryRowLimitPercentage = new Gcp.DataLoss.PreventionJobTrigger("bigquery_row_limit_percentage", new()
        {
            Parent = "projects/my-project-name",
            Description = "Description",
            DisplayName = "Displayname",
            Triggers = new[]
            {
                new Gcp.DataLoss.Inputs.PreventionJobTriggerTriggerArgs
                {
                    Schedule = new Gcp.DataLoss.Inputs.PreventionJobTriggerTriggerScheduleArgs
                    {
                        RecurrencePeriodDuration = "86400s",
                    },
                },
            },
            InspectJob = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobArgs
            {
                InspectTemplateName = "fake",
                Actions = new[]
                {
                    new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionArgs
                    {
                        SaveFindings = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionSaveFindingsArgs
                        {
                            OutputConfig = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs
                            {
                                Table = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs
                                {
                                    ProjectId = "project",
                                    DatasetId = "dataset",
                                },
                            },
                        },
                    },
                },
                StorageConfig = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobStorageConfigArgs
                {
                    BigQueryOptions = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobStorageConfigBigQueryOptionsArgs
                    {
                        TableReference = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobStorageConfigBigQueryOptionsTableReferenceArgs
                        {
                            ProjectId = "project",
                            DatasetId = "dataset",
                            TableId = "table_to_scan",
                        },
                        RowsLimitPercent = 50,
                        SampleMethod = "RANDOM_START",
                    },
                },
            },
        });
    
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.gcp.dataloss.PreventionJobTrigger;
    import com.pulumi.gcp.dataloss.PreventionJobTriggerArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerTriggerArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerTriggerScheduleArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobArgs;
    // The four Action-related imports below were missing even though the
    // corresponding *Args classes are used in stack(); without them the
    // example does not compile.
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobActionArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobActionSaveFindingsArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigBigQueryOptionsArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigBigQueryOptionsTableReferenceArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        // Creates a DLP job trigger that scans a BigQuery table on a 86400s
        // schedule, limits the scan to 50% of rows (sampleMethod
        // "RANDOM_START"), and saves findings to a BigQuery dataset.
        public static void stack(Context ctx) {
            var bigqueryRowLimitPercentage = new PreventionJobTrigger("bigqueryRowLimitPercentage", PreventionJobTriggerArgs.builder()
                .parent("projects/my-project-name")
                .description("Description")
                .displayName("Displayname")
                .triggers(PreventionJobTriggerTriggerArgs.builder()
                    .schedule(PreventionJobTriggerTriggerScheduleArgs.builder()
                        .recurrencePeriodDuration("86400s")
                        .build())
                    .build())
                .inspectJob(PreventionJobTriggerInspectJobArgs.builder()
                    .inspectTemplateName("fake")
                    .actions(PreventionJobTriggerInspectJobActionArgs.builder()
                        .saveFindings(PreventionJobTriggerInspectJobActionSaveFindingsArgs.builder()
                            .outputConfig(PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs.builder()
                                .table(PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs.builder()
                                    .projectId("project")
                                    .datasetId("dataset")
                                    .build())
                                .build())
                            .build())
                        .build())
                    .storageConfig(PreventionJobTriggerInspectJobStorageConfigArgs.builder()
                        .bigQueryOptions(PreventionJobTriggerInspectJobStorageConfigBigQueryOptionsArgs.builder()
                            .tableReference(PreventionJobTriggerInspectJobStorageConfigBigQueryOptionsTableReferenceArgs.builder()
                                .projectId("project")
                                .datasetId("dataset")
                                .tableId("table_to_scan")
                                .build())
                            .rowsLimitPercent(50)
                            .sampleMethod("RANDOM_START")
                            .build())
                        .build())
                    .build())
                .build());
    
        }
    }
    
    # YAML form of the same example: a DLP job trigger that re-runs every
    # 86400s, scans a BigQuery table (rowsLimitPercent: 50 with RANDOM_START
    # sampling) and saves findings to a BigQuery table.
    resources:
      bigqueryRowLimitPercentage:
        type: gcp:dataloss:PreventionJobTrigger
        name: bigquery_row_limit_percentage
        properties:
          parent: projects/my-project-name
          description: Description
          displayName: Displayname
          triggers:
            # Re-run the inspect job once every 24 hours.
            - schedule:
                recurrencePeriodDuration: 86400s
          inspectJob:
            inspectTemplateName: fake
            actions:
              # Persist findings into the BigQuery table below.
              - saveFindings:
                  outputConfig:
                    table:
                      projectId: project
                      datasetId: dataset
            storageConfig:
              bigQueryOptions:
                tableReference:
                  projectId: project
                  datasetId: dataset
                  tableId: table_to_scan
                rowsLimitPercent: 50
                sampleMethod: RANDOM_START
    

    Dlp Job Trigger Job Notification Emails

    import * as pulumi from "@pulumi/pulumi";
    import * as gcp from "@pulumi/gcp";
    
    // DLP job trigger that scans a Cloud Storage directory once every 86400s.
    // Its only action is jobNotificationEmails, which takes no options and is
    // therefore configured with an empty object.
    const jobNotificationEmails = new gcp.dataloss.PreventionJobTrigger("job_notification_emails", {
        parent: "projects/my-project-name",
        description: "Description for the job_trigger created by terraform",
        displayName: "TerraformDisplayName",
        triggers: [{
            schedule: {
                recurrencePeriodDuration: "86400s",
            },
        }],
        inspectJob: {
            inspectTemplateName: "sample-inspect-template",
            actions: [{
                jobNotificationEmails: {},
            }],
            storageConfig: {
                cloudStorageOptions: {
                    // Scan every object under this Cloud Storage directory.
                    fileSet: {
                        url: "gs://mybucket/directory/",
                    },
                },
            },
        },
    });
    
    import pulumi
    import pulumi_gcp as gcp
    
    # DLP job trigger that scans a Cloud Storage directory once every 86400s.
    # The job_notification_emails action carries no options, so its Args class
    # is instantiated with no arguments.
    job_notification_emails = gcp.dataloss.PreventionJobTrigger("job_notification_emails",
        parent="projects/my-project-name",
        description="Description for the job_trigger created by terraform",
        display_name="TerraformDisplayName",
        triggers=[gcp.dataloss.PreventionJobTriggerTriggerArgs(
            schedule=gcp.dataloss.PreventionJobTriggerTriggerScheduleArgs(
                recurrence_period_duration="86400s",
            ),
        )],
        inspect_job=gcp.dataloss.PreventionJobTriggerInspectJobArgs(
            inspect_template_name="sample-inspect-template",
            actions=[gcp.dataloss.PreventionJobTriggerInspectJobActionArgs(
                job_notification_emails=gcp.dataloss.PreventionJobTriggerInspectJobActionJobNotificationEmailsArgs(),
            )],
            storage_config=gcp.dataloss.PreventionJobTriggerInspectJobStorageConfigArgs(
                cloud_storage_options=gcp.dataloss.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs(
                    # Scan every object under this Cloud Storage directory.
                    file_set=gcp.dataloss.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs(
                        url="gs://mybucket/directory/",
                    ),
                ),
            ),
        ))
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/dataloss"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    // Creates a DLP job trigger that scans a Cloud Storage directory once
    // every 86400s with a job-notification-emails action.
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		_, err := dataloss.NewPreventionJobTrigger(ctx, "job_notification_emails", &dataloss.PreventionJobTriggerArgs{
    			Parent:      pulumi.String("projects/my-project-name"),
    			Description: pulumi.String("Description for the job_trigger created by terraform"),
    			DisplayName: pulumi.String("TerraformDisplayName"),
    			Triggers: dataloss.PreventionJobTriggerTriggerArray{
    				&dataloss.PreventionJobTriggerTriggerArgs{
    					Schedule: &dataloss.PreventionJobTriggerTriggerScheduleArgs{
    						RecurrencePeriodDuration: pulumi.String("86400s"),
    					},
    				},
    			},
    			InspectJob: &dataloss.PreventionJobTriggerInspectJobArgs{
    				InspectTemplateName: pulumi.String("sample-inspect-template"),
    				Actions: dataloss.PreventionJobTriggerInspectJobActionArray{
    					&dataloss.PreventionJobTriggerInspectJobActionArgs{
    						// nil stands for an empty jobNotificationEmails block —
    						// the action has no options (see the other language
    						// examples, which pass an empty object).
    						JobNotificationEmails: nil,
    					},
    				},
    				StorageConfig: &dataloss.PreventionJobTriggerInspectJobStorageConfigArgs{
    					CloudStorageOptions: &dataloss.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs{
    						// Scan every object under this Cloud Storage directory.
    						FileSet: &dataloss.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs{
    							Url: pulumi.String("gs://mybucket/directory/"),
    						},
    					},
    				},
    			},
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Gcp = Pulumi.Gcp;
    
    // DLP job trigger that scans a Cloud Storage directory once every 86400s
    // with a job-notification-emails action.
    return await Deployment.RunAsync(() => 
    {
        var jobNotificationEmails = new Gcp.DataLoss.PreventionJobTrigger("job_notification_emails", new()
        {
            Parent = "projects/my-project-name",
            Description = "Description for the job_trigger created by terraform",
            DisplayName = "TerraformDisplayName",
            Triggers = new[]
            {
                new Gcp.DataLoss.Inputs.PreventionJobTriggerTriggerArgs
                {
                    Schedule = new Gcp.DataLoss.Inputs.PreventionJobTriggerTriggerScheduleArgs
                    {
                        RecurrencePeriodDuration = "86400s",
                    },
                },
            },
            InspectJob = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobArgs
            {
                InspectTemplateName = "sample-inspect-template",
                Actions = new[]
                {
                    new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionArgs
                    {
                        // null stands for an empty JobNotificationEmails block —
                        // the action has no options (the other language examples
                        // pass an empty object).
                        JobNotificationEmails = null,
                    },
                },
                StorageConfig = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobStorageConfigArgs
                {
                    CloudStorageOptions = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs
                    {
                        // Scan every object under this Cloud Storage directory.
                        FileSet = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs
                        {
                            Url = "gs://mybucket/directory/",
                        },
                    },
                },
            },
        });
    
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.gcp.dataloss.PreventionJobTrigger;
    import com.pulumi.gcp.dataloss.PreventionJobTriggerArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerTriggerArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerTriggerScheduleArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobArgs;
    // The two Action-related imports below were missing even though the
    // corresponding *Args classes are used in stack(); without them the
    // example does not compile.
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobActionArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobActionJobNotificationEmailsArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        // Creates a DLP job trigger that scans a Cloud Storage directory on a
        // 86400s schedule with a job-notification-emails action.
        public static void stack(Context ctx) {
            var jobNotificationEmails = new PreventionJobTrigger("jobNotificationEmails", PreventionJobTriggerArgs.builder()
                .parent("projects/my-project-name")
                .description("Description for the job_trigger created by terraform")
                .displayName("TerraformDisplayName")
                .triggers(PreventionJobTriggerTriggerArgs.builder()
                    .schedule(PreventionJobTriggerTriggerScheduleArgs.builder()
                        .recurrencePeriodDuration("86400s")
                        .build())
                    .build())
                .inspectJob(PreventionJobTriggerInspectJobArgs.builder()
                    .inspectTemplateName("sample-inspect-template")
                    .actions(PreventionJobTriggerInspectJobActionArgs.builder()
                        // The action takes no options, so pass an empty built args
                        // object; the generated builder has no zero-argument
                        // jobNotificationEmails() overload.
                        .jobNotificationEmails(PreventionJobTriggerInspectJobActionJobNotificationEmailsArgs.builder().build())
                        .build())
                    .storageConfig(PreventionJobTriggerInspectJobStorageConfigArgs.builder()
                        .cloudStorageOptions(PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs.builder()
                            .fileSet(PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs.builder()
                                .url("gs://mybucket/directory/")
                                .build())
                            .build())
                        .build())
                    .build())
                .build());
    
        }
    }
    
    # YAML form of the same example: a DLP job trigger that re-runs every
    # 86400s, scans a Cloud Storage directory, and enables the
    # jobNotificationEmails action (it takes no options, hence the empty map).
    resources:
      jobNotificationEmails:
        type: gcp:dataloss:PreventionJobTrigger
        name: job_notification_emails
        properties:
          parent: projects/my-project-name
          description: Description for the job_trigger created by terraform
          displayName: TerraformDisplayName
          triggers:
            - schedule:
                recurrencePeriodDuration: 86400s
          inspectJob:
            inspectTemplateName: sample-inspect-template
            actions:
              - jobNotificationEmails: {}
            storageConfig:
              cloudStorageOptions:
                fileSet:
                  url: gs://mybucket/directory/
    

    Dlp Job Trigger Deidentify

    import * as pulumi from "@pulumi/pulumi";
    import * as gcp from "@pulumi/gcp";
    
    // BigQuery dataset that will hold the transformation-details table.
    const _default = new gcp.bigquery.Dataset("default", {
        datasetId: "tf_test",
        friendlyName: "terraform-test",
        description: "Description for the dataset created by terraform",
        location: "US",
        defaultTableExpirationMs: 3600000,
        labels: {
            env: "default",
        },
    });
    // Table the de-identify action writes transformation details into.
    const defaultTable = new gcp.bigquery.Table("default", {
        datasetId: _default.datasetId,
        tableId: "tf_test",
        deletionProtection: false,
        timePartitioning: {
            type: "DAY",
        },
        labels: {
            env: "default",
        },
        schema: `    [
        {
          "name": "quantity",
          "type": "NUMERIC",
          "mode": "NULLABLE",
          "description": "The quantity"
        },
        {
          "name": "name",
          "type": "STRING",
          "mode": "NULLABLE",
          "description": "Name of the object"
        }
        ]
    `,
    });
    // DLP job trigger (86400s schedule) whose deidentify action transforms
    // CSV/TSV files from the scanned bucket into the cloudStorageOutput
    // location, recording details in the BigQuery table above.
    const deidentify = new gcp.dataloss.PreventionJobTrigger("deidentify", {
        parent: "projects/my-project-name",
        description: "Description for the job_trigger created by terraform",
        displayName: "TerraformDisplayName",
        triggers: [{
            schedule: {
                recurrencePeriodDuration: "86400s",
            },
        }],
        inspectJob: {
            inspectTemplateName: "sample-inspect-template",
            actions: [{
                deidentify: {
                    cloudStorageOutput: "gs://samplebucket/dir/",
                    fileTypesToTransforms: [
                        "CSV",
                        "TSV",
                    ],
                    transformationDetailsStorageConfig: {
                        table: {
                            projectId: "my-project-name",
                            datasetId: _default.datasetId,
                            tableId: defaultTable.tableId,
                        },
                    },
                    transformationConfig: {
                        deidentifyTemplate: "sample-deidentify-template",
                        imageRedactTemplate: "sample-image-redact-template",
                        structuredDeidentifyTemplate: "sample-structured-deidentify-template",
                    },
                },
            }],
            storageConfig: {
                cloudStorageOptions: {
                    fileSet: {
                        url: "gs://mybucket/directory/",
                    },
                },
            },
        },
    });
    
    import pulumi
    import pulumi_gcp as gcp
    
    # BigQuery dataset that will hold the transformation-details table.
    default = gcp.bigquery.Dataset("default",
        dataset_id="tf_test",
        friendly_name="terraform-test",
        description="Description for the dataset created by terraform",
        location="US",
        default_table_expiration_ms=3600000,
        labels={
            "env": "default",
        })
    # Table the de-identify action writes transformation details into.
    default_table = gcp.bigquery.Table("default",
        dataset_id=default.dataset_id,
        table_id="tf_test",
        deletion_protection=False,
        time_partitioning=gcp.bigquery.TableTimePartitioningArgs(
            type="DAY",
        ),
        labels={
            "env": "default",
        },
        schema="""    [
        {
          "name": "quantity",
          "type": "NUMERIC",
          "mode": "NULLABLE",
          "description": "The quantity"
        },
        {
          "name": "name",
          "type": "STRING",
          "mode": "NULLABLE",
          "description": "Name of the object"
        }
        ]
    """)
    # DLP job trigger (86400s schedule) whose deidentify action transforms
    # CSV/TSV files from the scanned bucket into the cloud_storage_output
    # location, recording details in the BigQuery table above.
    deidentify = gcp.dataloss.PreventionJobTrigger("deidentify",
        parent="projects/my-project-name",
        description="Description for the job_trigger created by terraform",
        display_name="TerraformDisplayName",
        triggers=[gcp.dataloss.PreventionJobTriggerTriggerArgs(
            schedule=gcp.dataloss.PreventionJobTriggerTriggerScheduleArgs(
                recurrence_period_duration="86400s",
            ),
        )],
        inspect_job=gcp.dataloss.PreventionJobTriggerInspectJobArgs(
            inspect_template_name="sample-inspect-template",
            actions=[gcp.dataloss.PreventionJobTriggerInspectJobActionArgs(
                deidentify=gcp.dataloss.PreventionJobTriggerInspectJobActionDeidentifyArgs(
                    cloud_storage_output="gs://samplebucket/dir/",
                    file_types_to_transforms=[
                        "CSV",
                        "TSV",
                    ],
                    transformation_details_storage_config=gcp.dataloss.PreventionJobTriggerInspectJobActionDeidentifyTransformationDetailsStorageConfigArgs(
                        table=gcp.dataloss.PreventionJobTriggerInspectJobActionDeidentifyTransformationDetailsStorageConfigTableArgs(
                            project_id="my-project-name",
                            dataset_id=default.dataset_id,
                            table_id=default_table.table_id,
                        ),
                    ),
                    transformation_config=gcp.dataloss.PreventionJobTriggerInspectJobActionDeidentifyTransformationConfigArgs(
                        deidentify_template="sample-deidentify-template",
                        image_redact_template="sample-image-redact-template",
                        structured_deidentify_template="sample-structured-deidentify-template",
                    ),
                ),
            )],
            storage_config=gcp.dataloss.PreventionJobTriggerInspectJobStorageConfigArgs(
                cloud_storage_options=gcp.dataloss.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs(
                    file_set=gcp.dataloss.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs(
                        url="gs://mybucket/directory/",
                    ),
                ),
            ),
        ))
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/bigquery"
    	"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/dataloss"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		_, err := bigquery.NewDataset(ctx, "default", &bigquery.DatasetArgs{
    			DatasetId:                pulumi.String("tf_test"),
    			FriendlyName:             pulumi.String("terraform-test"),
    			Description:              pulumi.String("Description for the dataset created by terraform"),
    			Location:                 pulumi.String("US"),
    			DefaultTableExpirationMs: pulumi.Int(3600000),
    			Labels: pulumi.StringMap{
    				"env": pulumi.String("default"),
    			},
    		})
    		if err != nil {
    			return err
    		}
    		defaultTable, err := bigquery.NewTable(ctx, "default", &bigquery.TableArgs{
    			DatasetId:          _default.DatasetId,
    			TableId:            pulumi.String("tf_test"),
    			DeletionProtection: pulumi.Bool(false),
    			TimePartitioning: &bigquery.TableTimePartitioningArgs{
    				Type: pulumi.String("DAY"),
    			},
    			Labels: pulumi.StringMap{
    				"env": pulumi.String("default"),
    			},
    			Schema: pulumi.String(`    [
        {
          "name": "quantity",
          "type": "NUMERIC",
          "mode": "NULLABLE",
          "description": "The quantity"
        },
        {
          "name": "name",
          "type": "STRING",
          "mode": "NULLABLE",
          "description": "Name of the object"
        }
        ]
    `),
    		})
    		if err != nil {
    			return err
    		}
    		_, err = dataloss.NewPreventionJobTrigger(ctx, "deidentify", &dataloss.PreventionJobTriggerArgs{
    			Parent:      pulumi.String("projects/my-project-name"),
    			Description: pulumi.String("Description for the job_trigger created by terraform"),
    			DisplayName: pulumi.String("TerraformDisplayName"),
    			Triggers: dataloss.PreventionJobTriggerTriggerArray{
    				&dataloss.PreventionJobTriggerTriggerArgs{
    					Schedule: &dataloss.PreventionJobTriggerTriggerScheduleArgs{
    						RecurrencePeriodDuration: pulumi.String("86400s"),
    					},
    				},
    			},
    			InspectJob: &dataloss.PreventionJobTriggerInspectJobArgs{
    				InspectTemplateName: pulumi.String("sample-inspect-template"),
    				Actions: dataloss.PreventionJobTriggerInspectJobActionArray{
    					&dataloss.PreventionJobTriggerInspectJobActionArgs{
    						Deidentify: &dataloss.PreventionJobTriggerInspectJobActionDeidentifyArgs{
    							CloudStorageOutput: pulumi.String("gs://samplebucket/dir/"),
    							FileTypesToTransforms: pulumi.StringArray{
    								pulumi.String("CSV"),
    								pulumi.String("TSV"),
    							},
    							TransformationDetailsStorageConfig: &dataloss.PreventionJobTriggerInspectJobActionDeidentifyTransformationDetailsStorageConfigArgs{
    								Table: &dataloss.PreventionJobTriggerInspectJobActionDeidentifyTransformationDetailsStorageConfigTableArgs{
    									ProjectId: pulumi.String("my-project-name"),
    									DatasetId: _default.DatasetId,
    									TableId:   defaultTable.TableId,
    								},
    							},
    							TransformationConfig: &dataloss.PreventionJobTriggerInspectJobActionDeidentifyTransformationConfigArgs{
    								DeidentifyTemplate:           pulumi.String("sample-deidentify-template"),
    								ImageRedactTemplate:          pulumi.String("sample-image-redact-template"),
    								StructuredDeidentifyTemplate: pulumi.String("sample-structured-deidentify-template"),
    							},
    						},
    					},
    				},
    				StorageConfig: &dataloss.PreventionJobTriggerInspectJobStorageConfigArgs{
    					CloudStorageOptions: &dataloss.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs{
    						FileSet: &dataloss.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs{
    							Url: pulumi.String("gs://mybucket/directory/"),
    						},
    					},
    				},
    			},
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Gcp = Pulumi.Gcp;
    
    // Creates a BigQuery dataset and table, then a DLP job trigger whose
    // Deidentify action writes transformation details into that table.
    return await Deployment.RunAsync(() => 
    {
        // BigQuery dataset that will hold the transformation-details table.
        var @default = new Gcp.BigQuery.Dataset("default", new()
        {
            DatasetId = "tf_test",
            FriendlyName = "terraform-test",
            Description = "Description for the dataset created by terraform",
            Location = "US",
            DefaultTableExpirationMs = 3600000,
            Labels = 
            {
                { "env", "default" },
            },
        });
    
        // Table the de-identify action writes transformation details into.
        var defaultTable = new Gcp.BigQuery.Table("default", new()
        {
            DatasetId = @default.DatasetId,
            TableId = "tf_test",
            DeletionProtection = false,
            TimePartitioning = new Gcp.BigQuery.Inputs.TableTimePartitioningArgs
            {
                Type = "DAY",
            },
            Labels = 
            {
                { "env", "default" },
            },
            Schema = @"    [
        {
          ""name"": ""quantity"",
          ""type"": ""NUMERIC"",
          ""mode"": ""NULLABLE"",
          ""description"": ""The quantity""
        },
        {
          ""name"": ""name"",
          ""type"": ""STRING"",
          ""mode"": ""NULLABLE"",
          ""description"": ""Name of the object""
        }
        ]
    ",
        });
    
        // DLP job trigger (86400s schedule) whose Deidentify action transforms
        // CSV/TSV files from the scanned bucket into CloudStorageOutput.
        var deidentify = new Gcp.DataLoss.PreventionJobTrigger("deidentify", new()
        {
            Parent = "projects/my-project-name",
            Description = "Description for the job_trigger created by terraform",
            DisplayName = "TerraformDisplayName",
            Triggers = new[]
            {
                new Gcp.DataLoss.Inputs.PreventionJobTriggerTriggerArgs
                {
                    Schedule = new Gcp.DataLoss.Inputs.PreventionJobTriggerTriggerScheduleArgs
                    {
                        RecurrencePeriodDuration = "86400s",
                    },
                },
            },
            InspectJob = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobArgs
            {
                InspectTemplateName = "sample-inspect-template",
                Actions = new[]
                {
                    new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionArgs
                    {
                        Deidentify = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionDeidentifyArgs
                        {
                            CloudStorageOutput = "gs://samplebucket/dir/",
                            FileTypesToTransforms = new[]
                            {
                                "CSV",
                                "TSV",
                            },
                            TransformationDetailsStorageConfig = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionDeidentifyTransformationDetailsStorageConfigArgs
                            {
                                Table = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionDeidentifyTransformationDetailsStorageConfigTableArgs
                                {
                                    ProjectId = "my-project-name",
                                    DatasetId = @default.DatasetId,
                                    TableId = defaultTable.TableId,
                                },
                            },
                            TransformationConfig = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionDeidentifyTransformationConfigArgs
                            {
                                DeidentifyTemplate = "sample-deidentify-template",
                                ImageRedactTemplate = "sample-image-redact-template",
                                StructuredDeidentifyTemplate = "sample-structured-deidentify-template",
                            },
                        },
                    },
                },
                StorageConfig = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobStorageConfigArgs
                {
                    CloudStorageOptions = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs
                    {
                        FileSet = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs
                        {
                            Url = "gs://mybucket/directory/",
                        },
                    },
                },
            },
        });
    
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.gcp.bigquery.Dataset;
    import com.pulumi.gcp.bigquery.DatasetArgs;
    import com.pulumi.gcp.bigquery.Table;
    import com.pulumi.gcp.bigquery.TableArgs;
    import com.pulumi.gcp.bigquery.inputs.TableTimePartitioningArgs;
    import com.pulumi.gcp.dataloss.PreventionJobTrigger;
    import com.pulumi.gcp.dataloss.PreventionJobTriggerArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerTriggerArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerTriggerScheduleArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobArgs;
    // The five Action/Deidentify imports below were missing even though the
    // corresponding *Args classes are used in stack(); without them the
    // example does not compile.
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobActionArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobActionDeidentifyArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobActionDeidentifyTransformationDetailsStorageConfigArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobActionDeidentifyTransformationDetailsStorageConfigTableArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobActionDeidentifyTransformationConfigArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        // Creates a BigQuery dataset and table, then a DLP job trigger whose
        // deidentify action writes transformation details into that table.
        public static void stack(Context ctx) {
            var default_ = new Dataset("default", DatasetArgs.builder()
                .datasetId("tf_test")
                .friendlyName("terraform-test")
                .description("Description for the dataset created by terraform")
                .location("US")
                .defaultTableExpirationMs(3600000)
                .labels(Map.of("env", "default"))
                .build());
    
            var defaultTable = new Table("defaultTable", TableArgs.builder()
                .datasetId(default_.datasetId())
                .tableId("tf_test")
                .deletionProtection(false)
                .timePartitioning(TableTimePartitioningArgs.builder()
                    .type("DAY")
                    .build())
                .labels(Map.of("env", "default"))
                .schema("""
        [
        {
          "name": "quantity",
          "type": "NUMERIC",
          "mode": "NULLABLE",
          "description": "The quantity"
        },
        {
          "name": "name",
          "type": "STRING",
          "mode": "NULLABLE",
          "description": "Name of the object"
        }
        ]
                """)
                .build());
    
            var deidentify = new PreventionJobTrigger("deidentify", PreventionJobTriggerArgs.builder()
                .parent("projects/my-project-name")
                .description("Description for the job_trigger created by terraform")
                .displayName("TerraformDisplayName")
                .triggers(PreventionJobTriggerTriggerArgs.builder()
                    .schedule(PreventionJobTriggerTriggerScheduleArgs.builder()
                        .recurrencePeriodDuration("86400s")
                        .build())
                    .build())
                .inspectJob(PreventionJobTriggerInspectJobArgs.builder()
                    .inspectTemplateName("sample-inspect-template")
                    .actions(PreventionJobTriggerInspectJobActionArgs.builder()
                        .deidentify(PreventionJobTriggerInspectJobActionDeidentifyArgs.builder()
                            .cloudStorageOutput("gs://samplebucket/dir/")
                            .fileTypesToTransforms(                        
                                "CSV",
                                "TSV")
                            .transformationDetailsStorageConfig(PreventionJobTriggerInspectJobActionDeidentifyTransformationDetailsStorageConfigArgs.builder()
                                .table(PreventionJobTriggerInspectJobActionDeidentifyTransformationDetailsStorageConfigTableArgs.builder()
                                    .projectId("my-project-name")
                                    .datasetId(default_.datasetId())
                                    .tableId(defaultTable.tableId())
                                    .build())
                                .build())
                            .transformationConfig(PreventionJobTriggerInspectJobActionDeidentifyTransformationConfigArgs.builder()
                                .deidentifyTemplate("sample-deidentify-template")
                                .imageRedactTemplate("sample-image-redact-template")
                                .structuredDeidentifyTemplate("sample-structured-deidentify-template")
                                .build())
                            .build())
                        .build())
                    .storageConfig(PreventionJobTriggerInspectJobStorageConfigArgs.builder()
                        .cloudStorageOptions(PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs.builder()
                            .fileSet(PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs.builder()
                                .url("gs://mybucket/directory/")
                                .build())
                            .build())
                        .build())
                    .build())
                .build());
    
        }
    }
    
    resources:
      deidentify:
        type: gcp:dataloss:PreventionJobTrigger
        properties:
          parent: projects/my-project-name
          description: Description for the job_trigger created by terraform
          displayName: TerraformDisplayName
          triggers:
            - schedule:
                recurrencePeriodDuration: 86400s
          inspectJob:
            inspectTemplateName: sample-inspect-template
            actions:
              - deidentify:
                  cloudStorageOutput: gs://samplebucket/dir/
                  fileTypesToTransforms:
                    - CSV
                    - TSV
                  transformationDetailsStorageConfig:
                    table:
                      projectId: my-project-name
                      datasetId: ${default.datasetId}
                      tableId: ${defaultTable.tableId}
                  transformationConfig:
                    deidentifyTemplate: sample-deidentify-template
                    imageRedactTemplate: sample-image-redact-template
                    structuredDeidentifyTemplate: sample-structured-deidentify-template
            storageConfig:
              cloudStorageOptions:
                fileSet:
                  url: gs://mybucket/directory/
      default:
        type: gcp:bigquery:Dataset
        properties:
          datasetId: tf_test
          friendlyName: terraform-test
          description: Description for the dataset created by terraform
          location: US
          defaultTableExpirationMs: 3.6e+06
          labels:
            env: default
      defaultTable:
        type: gcp:bigquery:Table
        name: default
        properties:
          datasetId: ${default.datasetId}
          tableId: tf_test
          deletionProtection: false
          timePartitioning:
            type: DAY
          labels:
            env: default
          schema: |2
                [
                {
                  "name": "quantity",
                  "type": "NUMERIC",
                  "mode": "NULLABLE",
                  "description": "The quantity"
                },
                {
                  "name": "name",
                  "type": "STRING",
                  "mode": "NULLABLE",
                  "description": "Name of the object"
                }
                ]
    

    Dlp Job Trigger Hybrid

    import * as pulumi from "@pulumi/pulumi";
    import * as gcp from "@pulumi/gcp";
    
    const hybridTrigger = new gcp.dataloss.PreventionJobTrigger("hybrid_trigger", {
        parent: "projects/my-project-name",
        triggers: [{
            manual: {},
        }],
        inspectJob: {
            inspectTemplateName: "fake",
            actions: [{
                saveFindings: {
                    outputConfig: {
                        table: {
                            projectId: "project",
                            datasetId: "dataset",
                        },
                    },
                },
            }],
            storageConfig: {
                hybridOptions: {
                    description: "Hybrid job trigger for data from the comments field of a table that contains customer appointment bookings",
                    requiredFindingLabelKeys: ["appointment-bookings-comments"],
                    labels: {
                        env: "prod",
                    },
                    tableOptions: {
                        identifyingFields: [{
                            name: "booking_id",
                        }],
                    },
                },
            },
        },
    });
    
    import pulumi
    import pulumi_gcp as gcp
    
    hybrid_trigger = gcp.dataloss.PreventionJobTrigger("hybrid_trigger",
        parent="projects/my-project-name",
        triggers=[gcp.dataloss.PreventionJobTriggerTriggerArgs(
            manual=gcp.dataloss.PreventionJobTriggerTriggerManualArgs(),
        )],
        inspect_job=gcp.dataloss.PreventionJobTriggerInspectJobArgs(
            inspect_template_name="fake",
            actions=[gcp.dataloss.PreventionJobTriggerInspectJobActionArgs(
                save_findings=gcp.dataloss.PreventionJobTriggerInspectJobActionSaveFindingsArgs(
                    output_config=gcp.dataloss.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs(
                        table=gcp.dataloss.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs(
                            project_id="project",
                            dataset_id="dataset",
                        ),
                    ),
                ),
            )],
            storage_config=gcp.dataloss.PreventionJobTriggerInspectJobStorageConfigArgs(
                hybrid_options=gcp.dataloss.PreventionJobTriggerInspectJobStorageConfigHybridOptionsArgs(
                    description="Hybrid job trigger for data from the comments field of a table that contains customer appointment bookings",
                    required_finding_label_keys=["appointment-bookings-comments"],
                    labels={
                        "env": "prod",
                    },
                    table_options=gcp.dataloss.PreventionJobTriggerInspectJobStorageConfigHybridOptionsTableOptionsArgs(
                        identifying_fields=[gcp.dataloss.PreventionJobTriggerInspectJobStorageConfigHybridOptionsTableOptionsIdentifyingFieldArgs(
                            name="booking_id",
                        )],
                    ),
                ),
            ),
        ))
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/dataloss"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		_, err := dataloss.NewPreventionJobTrigger(ctx, "hybrid_trigger", &dataloss.PreventionJobTriggerArgs{
    			Parent: pulumi.String("projects/my-project-name"),
    			Triggers: dataloss.PreventionJobTriggerTriggerArray{
    				&dataloss.PreventionJobTriggerTriggerArgs{
    					Manual: nil,
    				},
    			},
    			InspectJob: &dataloss.PreventionJobTriggerInspectJobArgs{
    				InspectTemplateName: pulumi.String("fake"),
    				Actions: dataloss.PreventionJobTriggerInspectJobActionArray{
    					&dataloss.PreventionJobTriggerInspectJobActionArgs{
    						SaveFindings: &dataloss.PreventionJobTriggerInspectJobActionSaveFindingsArgs{
    							OutputConfig: &dataloss.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs{
    								Table: &dataloss.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs{
    									ProjectId: pulumi.String("project"),
    									DatasetId: pulumi.String("dataset"),
    								},
    							},
    						},
    					},
    				},
    				StorageConfig: &dataloss.PreventionJobTriggerInspectJobStorageConfigArgs{
    					HybridOptions: &dataloss.PreventionJobTriggerInspectJobStorageConfigHybridOptionsArgs{
    						Description: pulumi.String("Hybrid job trigger for data from the comments field of a table that contains customer appointment bookings"),
    						RequiredFindingLabelKeys: pulumi.StringArray{
    							pulumi.String("appointment-bookings-comments"),
    						},
    						Labels: pulumi.StringMap{
    							"env": pulumi.String("prod"),
    						},
    						TableOptions: &dataloss.PreventionJobTriggerInspectJobStorageConfigHybridOptionsTableOptionsArgs{
    							IdentifyingFields: dataloss.PreventionJobTriggerInspectJobStorageConfigHybridOptionsTableOptionsIdentifyingFieldArray{
    								&dataloss.PreventionJobTriggerInspectJobStorageConfigHybridOptionsTableOptionsIdentifyingFieldArgs{
    									Name: pulumi.String("booking_id"),
    								},
    							},
    						},
    					},
    				},
    			},
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Gcp = Pulumi.Gcp;
    
    return await Deployment.RunAsync(() => 
    {
        var hybridTrigger = new Gcp.DataLoss.PreventionJobTrigger("hybrid_trigger", new()
        {
            Parent = "projects/my-project-name",
            Triggers = new[]
            {
                new Gcp.DataLoss.Inputs.PreventionJobTriggerTriggerArgs
                {
                    Manual = null,
                },
            },
            InspectJob = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobArgs
            {
                InspectTemplateName = "fake",
                Actions = new[]
                {
                    new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionArgs
                    {
                        SaveFindings = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionSaveFindingsArgs
                        {
                            OutputConfig = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs
                            {
                                Table = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs
                                {
                                    ProjectId = "project",
                                    DatasetId = "dataset",
                                },
                            },
                        },
                    },
                },
                StorageConfig = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobStorageConfigArgs
                {
                    HybridOptions = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobStorageConfigHybridOptionsArgs
                    {
                        Description = "Hybrid job trigger for data from the comments field of a table that contains customer appointment bookings",
                        RequiredFindingLabelKeys = new[]
                        {
                            "appointment-bookings-comments",
                        },
                        Labels = 
                        {
                            { "env", "prod" },
                        },
                        TableOptions = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobStorageConfigHybridOptionsTableOptionsArgs
                        {
                            IdentifyingFields = new[]
                            {
                                new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobStorageConfigHybridOptionsTableOptionsIdentifyingFieldArgs
                                {
                                    Name = "booking_id",
                                },
                            },
                        },
                    },
                },
            },
        });
    
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.gcp.dataloss.PreventionJobTrigger;
    import com.pulumi.gcp.dataloss.PreventionJobTriggerArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerTriggerArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerTriggerManualArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobActionArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobActionSaveFindingsArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigHybridOptionsArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigHybridOptionsTableOptionsArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigHybridOptionsTableOptionsIdentifyingFieldArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var hybridTrigger = new PreventionJobTrigger("hybridTrigger", PreventionJobTriggerArgs.builder()
                .parent("projects/my-project-name")
                .triggers(PreventionJobTriggerTriggerArgs.builder()
                    .manual(PreventionJobTriggerTriggerManualArgs.builder().build())
                    .build())
                .inspectJob(PreventionJobTriggerInspectJobArgs.builder()
                    .inspectTemplateName("fake")
                    .actions(PreventionJobTriggerInspectJobActionArgs.builder()
                        .saveFindings(PreventionJobTriggerInspectJobActionSaveFindingsArgs.builder()
                            .outputConfig(PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs.builder()
                                .table(PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs.builder()
                                    .projectId("project")
                                    .datasetId("dataset")
                                    .build())
                                .build())
                            .build())
                        .build())
                    .storageConfig(PreventionJobTriggerInspectJobStorageConfigArgs.builder()
                        .hybridOptions(PreventionJobTriggerInspectJobStorageConfigHybridOptionsArgs.builder()
                            .description("Hybrid job trigger for data from the comments field of a table that contains customer appointment bookings")
                            .requiredFindingLabelKeys("appointment-bookings-comments")
                            .labels(Map.of("env", "prod"))
                            .tableOptions(PreventionJobTriggerInspectJobStorageConfigHybridOptionsTableOptionsArgs.builder()
                                .identifyingFields(PreventionJobTriggerInspectJobStorageConfigHybridOptionsTableOptionsIdentifyingFieldArgs.builder()
                                    .name("booking_id")
                                    .build())
                                .build())
                            .build())
                        .build())
                    .build())
                .build());
    
        }
    }
    
    resources:
      hybridTrigger:
        type: gcp:dataloss:PreventionJobTrigger
        name: hybrid_trigger
        properties:
          parent: projects/my-project-name
          triggers:
            - manual: {}
          inspectJob:
            inspectTemplateName: fake
            actions:
              - saveFindings:
                  outputConfig:
                    table:
                      projectId: project
                      datasetId: dataset
            storageConfig:
              hybridOptions:
                description: Hybrid job trigger for data from the comments field of a table that contains customer appointment bookings
                requiredFindingLabelKeys:
                  - appointment-bookings-comments
                labels:
                  env: prod
                tableOptions:
                  identifyingFields:
                    - name: booking_id
    

    Dlp Job Trigger Inspect

    import * as pulumi from "@pulumi/pulumi";
    import * as gcp from "@pulumi/gcp";
    
    const inspect = new gcp.dataloss.PreventionJobTrigger("inspect", {
        parent: "projects/my-project-name",
        description: "Description",
        displayName: "Displayname",
        triggers: [{
            schedule: {
                recurrencePeriodDuration: "86400s",
            },
        }],
        inspectJob: {
            inspectTemplateName: "fake",
            actions: [{
                saveFindings: {
                    outputConfig: {
                        table: {
                            projectId: "project",
                            datasetId: "dataset",
                        },
                    },
                },
            }],
            storageConfig: {
                cloudStorageOptions: {
                    fileSet: {
                        url: "gs://mybucket/directory/",
                    },
                },
            },
            inspectConfig: {
                customInfoTypes: [{
                    infoType: {
                        name: "MY_CUSTOM_TYPE",
                    },
                    likelihood: "UNLIKELY",
                    regex: {
                        pattern: "test*",
                    },
                }],
                infoTypes: [{
                    name: "EMAIL_ADDRESS",
                }],
                minLikelihood: "UNLIKELY",
                ruleSets: [
                    {
                        infoTypes: [{
                            name: "EMAIL_ADDRESS",
                        }],
                        rules: [{
                            exclusionRule: {
                                regex: {
                                    pattern: ".+@example.com",
                                },
                                matchingType: "MATCHING_TYPE_FULL_MATCH",
                            },
                        }],
                    },
                    {
                        infoTypes: [{
                            name: "MY_CUSTOM_TYPE",
                        }],
                        rules: [{
                            hotwordRule: {
                                hotwordRegex: {
                                    pattern: "example*",
                                },
                                proximity: {
                                    windowBefore: 50,
                                },
                                likelihoodAdjustment: {
                                    fixedLikelihood: "VERY_LIKELY",
                                },
                            },
                        }],
                    },
                ],
                limits: {
                    maxFindingsPerItem: 10,
                    maxFindingsPerRequest: 50,
                },
            },
        },
    });
    
    import pulumi
    import pulumi_gcp as gcp
    
    inspect = gcp.dataloss.PreventionJobTrigger("inspect",
        parent="projects/my-project-name",
        description="Description",
        display_name="Displayname",
        triggers=[gcp.dataloss.PreventionJobTriggerTriggerArgs(
            schedule=gcp.dataloss.PreventionJobTriggerTriggerScheduleArgs(
                recurrence_period_duration="86400s",
            ),
        )],
        inspect_job=gcp.dataloss.PreventionJobTriggerInspectJobArgs(
            inspect_template_name="fake",
            actions=[gcp.dataloss.PreventionJobTriggerInspectJobActionArgs(
                save_findings=gcp.dataloss.PreventionJobTriggerInspectJobActionSaveFindingsArgs(
                    output_config=gcp.dataloss.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs(
                        table=gcp.dataloss.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs(
                            project_id="project",
                            dataset_id="dataset",
                        ),
                    ),
                ),
            )],
            storage_config=gcp.dataloss.PreventionJobTriggerInspectJobStorageConfigArgs(
                cloud_storage_options=gcp.dataloss.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs(
                    file_set=gcp.dataloss.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs(
                        url="gs://mybucket/directory/",
                    ),
                ),
            ),
            inspect_config=gcp.dataloss.PreventionJobTriggerInspectJobInspectConfigArgs(
                custom_info_types=[gcp.dataloss.PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeArgs(
                    info_type=gcp.dataloss.PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeInfoTypeArgs(
                        name="MY_CUSTOM_TYPE",
                    ),
                    likelihood="UNLIKELY",
                    regex=gcp.dataloss.PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeRegexArgs(
                        pattern="test*",
                    ),
                )],
                info_types=[gcp.dataloss.PreventionJobTriggerInspectJobInspectConfigInfoTypeArgs(
                    name="EMAIL_ADDRESS",
                )],
                min_likelihood="UNLIKELY",
                rule_sets=[
                    gcp.dataloss.PreventionJobTriggerInspectJobInspectConfigRuleSetArgs(
                        info_types=[gcp.dataloss.PreventionJobTriggerInspectJobInspectConfigRuleSetInfoTypeArgs(
                            name="EMAIL_ADDRESS",
                        )],
                        rules=[gcp.dataloss.PreventionJobTriggerInspectJobInspectConfigRuleSetRuleArgs(
                            exclusion_rule=gcp.dataloss.PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleArgs(
                                regex=gcp.dataloss.PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleRegexArgs(
                                    pattern=".+@example.com",
                                ),
                                matching_type="MATCHING_TYPE_FULL_MATCH",
                            ),
                        )],
                    ),
                    gcp.dataloss.PreventionJobTriggerInspectJobInspectConfigRuleSetArgs(
                        info_types=[gcp.dataloss.PreventionJobTriggerInspectJobInspectConfigRuleSetInfoTypeArgs(
                            name="MY_CUSTOM_TYPE",
                        )],
                        rules=[gcp.dataloss.PreventionJobTriggerInspectJobInspectConfigRuleSetRuleArgs(
                            hotword_rule=gcp.dataloss.PreventionJobTriggerInspectJobInspectConfigRuleSetRuleHotwordRuleArgs(
                                hotword_regex=gcp.dataloss.PreventionJobTriggerInspectJobInspectConfigRuleSetRuleHotwordRuleHotwordRegexArgs(
                                    pattern="example*",
                                ),
                                proximity=gcp.dataloss.PreventionJobTriggerInspectJobInspectConfigRuleSetRuleHotwordRuleProximityArgs(
                                    window_before=50,
                                ),
                                likelihood_adjustment=gcp.dataloss.PreventionJobTriggerInspectJobInspectConfigRuleSetRuleHotwordRuleLikelihoodAdjustmentArgs(
                                    fixed_likelihood="VERY_LIKELY",
                                ),
                            ),
                        )],
                    ),
                ],
                limits=gcp.dataloss.PreventionJobTriggerInspectJobInspectConfigLimitsArgs(
                    max_findings_per_item=10,
                    max_findings_per_request=50,
                ),
            ),
        ))
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/dataloss"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		_, err := dataloss.NewPreventionJobTrigger(ctx, "inspect", &dataloss.PreventionJobTriggerArgs{
    			Parent:      pulumi.String("projects/my-project-name"),
    			Description: pulumi.String("Description"),
    			DisplayName: pulumi.String("Displayname"),
    			Triggers: dataloss.PreventionJobTriggerTriggerArray{
    				&dataloss.PreventionJobTriggerTriggerArgs{
    					Schedule: &dataloss.PreventionJobTriggerTriggerScheduleArgs{
    						RecurrencePeriodDuration: pulumi.String("86400s"),
    					},
    				},
    			},
    			InspectJob: &dataloss.PreventionJobTriggerInspectJobArgs{
    				InspectTemplateName: pulumi.String("fake"),
    				Actions: dataloss.PreventionJobTriggerInspectJobActionArray{
    					&dataloss.PreventionJobTriggerInspectJobActionArgs{
    						SaveFindings: &dataloss.PreventionJobTriggerInspectJobActionSaveFindingsArgs{
    							OutputConfig: &dataloss.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs{
    								Table: &dataloss.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs{
    									ProjectId: pulumi.String("project"),
    									DatasetId: pulumi.String("dataset"),
    								},
    							},
    						},
    					},
    				},
    				StorageConfig: &dataloss.PreventionJobTriggerInspectJobStorageConfigArgs{
    					CloudStorageOptions: &dataloss.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs{
    						FileSet: &dataloss.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs{
    							Url: pulumi.String("gs://mybucket/directory/"),
    						},
    					},
    				},
    				InspectConfig: &dataloss.PreventionJobTriggerInspectJobInspectConfigArgs{
    					CustomInfoTypes: dataloss.PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeArray{
    						&dataloss.PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeArgs{
    							InfoType: &dataloss.PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeInfoTypeArgs{
    								Name: pulumi.String("MY_CUSTOM_TYPE"),
    							},
    							Likelihood: pulumi.String("UNLIKELY"),
    							Regex: &dataloss.PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeRegexArgs{
    								Pattern: pulumi.String("test*"),
    							},
    						},
    					},
    					InfoTypes: dataloss.PreventionJobTriggerInspectJobInspectConfigInfoTypeArray{
    						&dataloss.PreventionJobTriggerInspectJobInspectConfigInfoTypeArgs{
    							Name: pulumi.String("EMAIL_ADDRESS"),
    						},
    					},
    					MinLikelihood: pulumi.String("UNLIKELY"),
    					RuleSets: dataloss.PreventionJobTriggerInspectJobInspectConfigRuleSetArray{
    						&dataloss.PreventionJobTriggerInspectJobInspectConfigRuleSetArgs{
    							InfoTypes: dataloss.PreventionJobTriggerInspectJobInspectConfigRuleSetInfoTypeArray{
    								&dataloss.PreventionJobTriggerInspectJobInspectConfigRuleSetInfoTypeArgs{
    									Name: pulumi.String("EMAIL_ADDRESS"),
    								},
    							},
    							Rules: dataloss.PreventionJobTriggerInspectJobInspectConfigRuleSetRuleArray{
    								&dataloss.PreventionJobTriggerInspectJobInspectConfigRuleSetRuleArgs{
    									ExclusionRule: &dataloss.PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleArgs{
    										Regex: &dataloss.PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleRegexArgs{
    											Pattern: pulumi.String(".+@example.com"),
    										},
    										MatchingType: pulumi.String("MATCHING_TYPE_FULL_MATCH"),
    									},
    								},
    							},
    						},
    						&dataloss.PreventionJobTriggerInspectJobInspectConfigRuleSetArgs{
    							InfoTypes: dataloss.PreventionJobTriggerInspectJobInspectConfigRuleSetInfoTypeArray{
    								&dataloss.PreventionJobTriggerInspectJobInspectConfigRuleSetInfoTypeArgs{
    									Name: pulumi.String("MY_CUSTOM_TYPE"),
    								},
    							},
    							Rules: dataloss.PreventionJobTriggerInspectJobInspectConfigRuleSetRuleArray{
    								&dataloss.PreventionJobTriggerInspectJobInspectConfigRuleSetRuleArgs{
    									HotwordRule: &dataloss.PreventionJobTriggerInspectJobInspectConfigRuleSetRuleHotwordRuleArgs{
    										HotwordRegex: &dataloss.PreventionJobTriggerInspectJobInspectConfigRuleSetRuleHotwordRuleHotwordRegexArgs{
    											Pattern: pulumi.String("example*"),
    										},
    										Proximity: &dataloss.PreventionJobTriggerInspectJobInspectConfigRuleSetRuleHotwordRuleProximityArgs{
    											WindowBefore: pulumi.Int(50),
    										},
    										LikelihoodAdjustment: &dataloss.PreventionJobTriggerInspectJobInspectConfigRuleSetRuleHotwordRuleLikelihoodAdjustmentArgs{
    											FixedLikelihood: pulumi.String("VERY_LIKELY"),
    										},
    									},
    								},
    							},
    						},
    					},
    					Limits: &dataloss.PreventionJobTriggerInspectJobInspectConfigLimitsArgs{
    						MaxFindingsPerItem:    pulumi.Int(10),
    						MaxFindingsPerRequest: pulumi.Int(50),
    					},
    				},
    			},
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Gcp = Pulumi.Gcp;
    
    return await Deployment.RunAsync(() => 
    {
        var inspect = new Gcp.DataLoss.PreventionJobTrigger("inspect", new()
        {
            Parent = "projects/my-project-name",
            Description = "Description",
            DisplayName = "Displayname",
            Triggers = new[]
            {
                new Gcp.DataLoss.Inputs.PreventionJobTriggerTriggerArgs
                {
                    Schedule = new Gcp.DataLoss.Inputs.PreventionJobTriggerTriggerScheduleArgs
                    {
                        RecurrencePeriodDuration = "86400s",
                    },
                },
            },
            InspectJob = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobArgs
            {
                InspectTemplateName = "fake",
                Actions = new[]
                {
                    new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionArgs
                    {
                        SaveFindings = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionSaveFindingsArgs
                        {
                            OutputConfig = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs
                            {
                                Table = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs
                                {
                                    ProjectId = "project",
                                    DatasetId = "dataset",
                                },
                            },
                        },
                    },
                },
                StorageConfig = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobStorageConfigArgs
                {
                    CloudStorageOptions = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs
                    {
                        FileSet = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs
                        {
                            Url = "gs://mybucket/directory/",
                        },
                    },
                },
                InspectConfig = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobInspectConfigArgs
                {
                    CustomInfoTypes = new[]
                    {
                        new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeArgs
                        {
                            InfoType = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeInfoTypeArgs
                            {
                                Name = "MY_CUSTOM_TYPE",
                            },
                            Likelihood = "UNLIKELY",
                            Regex = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeRegexArgs
                            {
                                Pattern = "test*",
                            },
                        },
                    },
                    InfoTypes = new[]
                    {
                        new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobInspectConfigInfoTypeArgs
                        {
                            Name = "EMAIL_ADDRESS",
                        },
                    },
                    MinLikelihood = "UNLIKELY",
                    RuleSets = new[]
                    {
                        new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobInspectConfigRuleSetArgs
                        {
                            InfoTypes = new[]
                            {
                                new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobInspectConfigRuleSetInfoTypeArgs
                                {
                                    Name = "EMAIL_ADDRESS",
                                },
                            },
                            Rules = new[]
                            {
                                new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobInspectConfigRuleSetRuleArgs
                                {
                                    ExclusionRule = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleArgs
                                    {
                                        Regex = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleRegexArgs
                                        {
                                            Pattern = ".+@example.com",
                                        },
                                        MatchingType = "MATCHING_TYPE_FULL_MATCH",
                                    },
                                },
                            },
                        },
                        new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobInspectConfigRuleSetArgs
                        {
                            InfoTypes = new[]
                            {
                                new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobInspectConfigRuleSetInfoTypeArgs
                                {
                                    Name = "MY_CUSTOM_TYPE",
                                },
                            },
                            Rules = new[]
                            {
                                new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobInspectConfigRuleSetRuleArgs
                                {
                                    HotwordRule = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobInspectConfigRuleSetRuleHotwordRuleArgs
                                    {
                                        HotwordRegex = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobInspectConfigRuleSetRuleHotwordRuleHotwordRegexArgs
                                        {
                                            Pattern = "example*",
                                        },
                                        Proximity = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobInspectConfigRuleSetRuleHotwordRuleProximityArgs
                                        {
                                            WindowBefore = 50,
                                        },
                                        LikelihoodAdjustment = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobInspectConfigRuleSetRuleHotwordRuleLikelihoodAdjustmentArgs
                                        {
                                            FixedLikelihood = "VERY_LIKELY",
                                        },
                                    },
                                },
                            },
                        },
                    },
                    Limits = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobInspectConfigLimitsArgs
                    {
                        MaxFindingsPerItem = 10,
                        MaxFindingsPerRequest = 50,
                    },
                },
            },
        });
    
    });
    
    package generated_program;

    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.gcp.dataloss.PreventionJobTrigger;
    import com.pulumi.gcp.dataloss.PreventionJobTriggerArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerTriggerArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerTriggerScheduleArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobActionArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobActionSaveFindingsArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobInspectConfigArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeInfoTypeArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeRegexArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobInspectConfigInfoTypeArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobInspectConfigRuleSetArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobInspectConfigRuleSetInfoTypeArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobInspectConfigRuleSetRuleArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleRegexArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobInspectConfigRuleSetRuleHotwordRuleArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobInspectConfigRuleSetRuleHotwordRuleHotwordRegexArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobInspectConfigRuleSetRuleHotwordRuleProximityArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobInspectConfigRuleSetRuleHotwordRuleLikelihoodAdjustmentArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobInspectConfigLimitsArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;

    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }

        // Basic DLP job trigger: scans gs://mybucket/directory/ every 24 hours and
        // saves findings to a BigQuery table; inspection combines a custom regex
        // info type, the built-in EMAIL_ADDRESS info type, and two rule sets.
        public static void stack(Context ctx) {
            var inspect = new PreventionJobTrigger("inspect", PreventionJobTriggerArgs.builder()
                .parent("projects/my-project-name")
                .description("Description")
                .displayName("Displayname")
                .triggers(PreventionJobTriggerTriggerArgs.builder()
                    .schedule(PreventionJobTriggerTriggerScheduleArgs.builder()
                        .recurrencePeriodDuration("86400s")
                        .build())
                    .build())
                .inspectJob(PreventionJobTriggerInspectJobArgs.builder()
                    .inspectTemplateName("fake")
                    .actions(PreventionJobTriggerInspectJobActionArgs.builder()
                        .saveFindings(PreventionJobTriggerInspectJobActionSaveFindingsArgs.builder()
                            .outputConfig(PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs.builder()
                                .table(PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs.builder()
                                    .projectId("project")
                                    .datasetId("dataset")
                                    .build())
                                .build())
                            .build())
                        .build())
                    .storageConfig(PreventionJobTriggerInspectJobStorageConfigArgs.builder()
                        .cloudStorageOptions(PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs.builder()
                            .fileSet(PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs.builder()
                                .url("gs://mybucket/directory/")
                                .build())
                            .build())
                        .build())
                    .inspectConfig(PreventionJobTriggerInspectJobInspectConfigArgs.builder()
                        .customInfoTypes(PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeArgs.builder()
                            .infoType(PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeInfoTypeArgs.builder()
                                .name("MY_CUSTOM_TYPE")
                                .build())
                            .likelihood("UNLIKELY")
                            .regex(PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeRegexArgs.builder()
                                .pattern("test*")
                                .build())
                            .build())
                        .infoTypes(PreventionJobTriggerInspectJobInspectConfigInfoTypeArgs.builder()
                            .name("EMAIL_ADDRESS")
                            .build())
                        .minLikelihood("UNLIKELY")
                        // Two rule sets: one excludes example.com addresses from
                        // EMAIL_ADDRESS matches, one boosts MY_CUSTOM_TYPE likelihood
                        // when a hotword appears within 50 characters before a match.
                        .ruleSets(
                            PreventionJobTriggerInspectJobInspectConfigRuleSetArgs.builder()
                                .infoTypes(PreventionJobTriggerInspectJobInspectConfigRuleSetInfoTypeArgs.builder()
                                    .name("EMAIL_ADDRESS")
                                    .build())
                                .rules(PreventionJobTriggerInspectJobInspectConfigRuleSetRuleArgs.builder()
                                    .exclusionRule(PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleArgs.builder()
                                        .regex(PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleRegexArgs.builder()
                                            .pattern(".+@example.com")
                                            .build())
                                        .matchingType("MATCHING_TYPE_FULL_MATCH")
                                        .build())
                                    .build())
                                .build(),
                            PreventionJobTriggerInspectJobInspectConfigRuleSetArgs.builder()
                                .infoTypes(PreventionJobTriggerInspectJobInspectConfigRuleSetInfoTypeArgs.builder()
                                    .name("MY_CUSTOM_TYPE")
                                    .build())
                                .rules(PreventionJobTriggerInspectJobInspectConfigRuleSetRuleArgs.builder()
                                    .hotwordRule(PreventionJobTriggerInspectJobInspectConfigRuleSetRuleHotwordRuleArgs.builder()
                                        .hotwordRegex(PreventionJobTriggerInspectJobInspectConfigRuleSetRuleHotwordRuleHotwordRegexArgs.builder()
                                            .pattern("example*")
                                            .build())
                                        .proximity(PreventionJobTriggerInspectJobInspectConfigRuleSetRuleHotwordRuleProximityArgs.builder()
                                            .windowBefore(50)
                                            .build())
                                        .likelihoodAdjustment(PreventionJobTriggerInspectJobInspectConfigRuleSetRuleHotwordRuleLikelihoodAdjustmentArgs.builder()
                                            .fixedLikelihood("VERY_LIKELY")
                                            .build())
                                        .build())
                                    .build())
                                .build())
                        .limits(PreventionJobTriggerInspectJobInspectConfigLimitsArgs.builder()
                            .maxFindingsPerItem(10)
                            .maxFindingsPerRequest(50)
                            .build())
                        .build())
                    .build())
                .build());

        }
    }
    
    # Basic DLP job trigger: daily scan of a Cloud Storage prefix with findings
    # saved to BigQuery; inspection mixes a custom regex info type, the built-in
    # EMAIL_ADDRESS info type, and two rule sets.
    resources:
      inspect:
        type: gcp:dataloss:PreventionJobTrigger
        properties:
          parent: projects/my-project-name
          description: Description
          displayName: Displayname
          triggers:
            - schedule:
                recurrencePeriodDuration: 86400s # run every 24 hours
          inspectJob:
            inspectTemplateName: fake
            actions:
              - saveFindings:
                  outputConfig:
                    table:
                      projectId: project
                      datasetId: dataset
            storageConfig:
              cloudStorageOptions:
                fileSet:
                  url: gs://mybucket/directory/
            inspectConfig:
              customInfoTypes:
                - infoType:
                    name: MY_CUSTOM_TYPE
                  likelihood: UNLIKELY
                  regex:
                    pattern: test*
              infoTypes:
                - name: EMAIL_ADDRESS
              minLikelihood: UNLIKELY
              ruleSets:
                # Exclude example.com addresses from EMAIL_ADDRESS matches.
                - infoTypes:
                    - name: EMAIL_ADDRESS
                  rules:
                    - exclusionRule:
                        regex:
                          pattern: .+@example.com
                        matchingType: MATCHING_TYPE_FULL_MATCH
                # Boost MY_CUSTOM_TYPE likelihood when a hotword precedes a match.
                - infoTypes:
                    - name: MY_CUSTOM_TYPE
                  rules:
                    - hotwordRule:
                        hotwordRegex:
                          pattern: example*
                        proximity:
                          windowBefore: 50
                        likelihoodAdjustment:
                          fixedLikelihood: VERY_LIKELY
              limits:
                maxFindingsPerItem: 10
                maxFindingsPerRequest: 50

    

    Dlp Job Trigger Publish To Stackdriver

    import * as pulumi from "@pulumi/pulumi";
    import * as gcp from "@pulumi/gcp";

    // Job trigger that runs daily and publishes inspection findings to
    // Cloud Monitoring (Stackdriver) rather than storing them.
    const publishToStackdriver = new gcp.dataloss.PreventionJobTrigger("publish_to_stackdriver", {
        parent: "projects/my-project-name",
        description: "Description for the job_trigger created by terraform",
        displayName: "TerraformDisplayName",
        triggers: [{
            schedule: {
                recurrencePeriodDuration: "86400s", // run every 24 hours
            },
        }],
        inspectJob: {
            inspectTemplateName: "sample-inspect-template",
            actions: [{
                publishToStackdriver: {}, // empty object enables the action
            }],
            storageConfig: {
                cloudStorageOptions: {
                    fileSet: {
                        url: "gs://mybucket/directory/",
                    },
                },
            },
        },
    });
    
    import pulumi
    import pulumi_gcp as gcp

    # Job trigger that runs daily and publishes inspection findings to
    # Cloud Monitoring (Stackdriver) rather than storing them.
    publish_to_stackdriver = gcp.dataloss.PreventionJobTrigger("publish_to_stackdriver",
        parent="projects/my-project-name",
        description="Description for the job_trigger created by terraform",
        display_name="TerraformDisplayName",
        triggers=[gcp.dataloss.PreventionJobTriggerTriggerArgs(
            schedule=gcp.dataloss.PreventionJobTriggerTriggerScheduleArgs(
                recurrence_period_duration="86400s",  # run every 24 hours
            ),
        )],
        inspect_job=gcp.dataloss.PreventionJobTriggerInspectJobArgs(
            inspect_template_name="sample-inspect-template",
            actions=[gcp.dataloss.PreventionJobTriggerInspectJobActionArgs(
                # Empty args value enables the publish-to-Stackdriver action.
                publish_to_stackdriver=gcp.dataloss.PreventionJobTriggerInspectJobActionPublishToStackdriverArgs(),
            )],
            storage_config=gcp.dataloss.PreventionJobTriggerInspectJobStorageConfigArgs(
                cloud_storage_options=gcp.dataloss.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs(
                    file_set=gcp.dataloss.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs(
                        url="gs://mybucket/directory/",
                    ),
                ),
            ),
        ))
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/dataloss"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		// Job trigger that runs daily and publishes inspection findings to
    		// Cloud Monitoring (Stackdriver) rather than storing them.
    		_, err := dataloss.NewPreventionJobTrigger(ctx, "publish_to_stackdriver", &dataloss.PreventionJobTriggerArgs{
    			Parent:      pulumi.String("projects/my-project-name"),
    			Description: pulumi.String("Description for the job_trigger created by terraform"),
    			DisplayName: pulumi.String("TerraformDisplayName"),
    			Triggers: dataloss.PreventionJobTriggerTriggerArray{
    				&dataloss.PreventionJobTriggerTriggerArgs{
    					Schedule: &dataloss.PreventionJobTriggerTriggerScheduleArgs{
    						RecurrencePeriodDuration: pulumi.String("86400s"), // run every 24 hours
    					},
    				},
    			},
    			InspectJob: &dataloss.PreventionJobTriggerInspectJobArgs{
    				InspectTemplateName: pulumi.String("sample-inspect-template"),
    				Actions: dataloss.PreventionJobTriggerInspectJobActionArray{
    					&dataloss.PreventionJobTriggerInspectJobActionArgs{
    						PublishToStackdriver: nil, // empty value enables the action (mirrors {} in other SDKs)
    					},
    				},
    				StorageConfig: &dataloss.PreventionJobTriggerInspectJobStorageConfigArgs{
    					CloudStorageOptions: &dataloss.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs{
    						FileSet: &dataloss.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs{
    							Url: pulumi.String("gs://mybucket/directory/"),
    						},
    					},
    				},
    			},
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Gcp = Pulumi.Gcp;
    
    return await Deployment.RunAsync(() => 
    {
        // Job trigger that runs daily and publishes inspection findings to
        // Cloud Monitoring (Stackdriver) rather than storing them.
        var publishToStackdriver = new Gcp.DataLoss.PreventionJobTrigger("publish_to_stackdriver", new()
        {
            Parent = "projects/my-project-name",
            Description = "Description for the job_trigger created by terraform",
            DisplayName = "TerraformDisplayName",
            Triggers = new[]
            {
                new Gcp.DataLoss.Inputs.PreventionJobTriggerTriggerArgs
                {
                    Schedule = new Gcp.DataLoss.Inputs.PreventionJobTriggerTriggerScheduleArgs
                    {
                        RecurrencePeriodDuration = "86400s", // run every 24 hours
                    },
                },
            },
            InspectJob = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobArgs
            {
                InspectTemplateName = "sample-inspect-template",
                Actions = new[]
                {
                    new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionArgs
                    {
                        PublishToStackdriver = null, // empty value enables the action (mirrors {} in other SDKs)
                    },
                },
                StorageConfig = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobStorageConfigArgs
                {
                    CloudStorageOptions = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs
                    {
                        FileSet = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs
                        {
                            Url = "gs://mybucket/directory/",
                        },
                    },
                },
            },
        });
    
    });
    
    package generated_program;

    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.gcp.dataloss.PreventionJobTrigger;
    import com.pulumi.gcp.dataloss.PreventionJobTriggerArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerTriggerArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerTriggerScheduleArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobActionArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobActionPublishToStackdriverArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;

    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }

        // Job trigger that runs daily and publishes inspection findings to
        // Cloud Monitoring (Stackdriver) rather than storing them.
        public static void stack(Context ctx) {
            var publishToStackdriver = new PreventionJobTrigger("publishToStackdriver", PreventionJobTriggerArgs.builder()
                .parent("projects/my-project-name")
                .description("Description for the job_trigger created by terraform")
                .displayName("TerraformDisplayName")
                .triggers(PreventionJobTriggerTriggerArgs.builder()
                    .schedule(PreventionJobTriggerTriggerScheduleArgs.builder()
                        .recurrencePeriodDuration("86400s")
                        .build())
                    .build())
                .inspectJob(PreventionJobTriggerInspectJobArgs.builder()
                    .inspectTemplateName("sample-inspect-template")
                    .actions(PreventionJobTriggerInspectJobActionArgs.builder()
                        // An empty args value enables the action (mirrors {} in other SDKs).
                        .publishToStackdriver(PreventionJobTriggerInspectJobActionPublishToStackdriverArgs.builder().build())
                        .build())
                    .storageConfig(PreventionJobTriggerInspectJobStorageConfigArgs.builder()
                        .cloudStorageOptions(PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs.builder()
                            .fileSet(PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs.builder()
                                .url("gs://mybucket/directory/")
                                .build())
                            .build())
                        .build())
                    .build())
                .build());

        }
    }
    
    # Job trigger that runs daily and publishes inspection findings to
    # Cloud Monitoring (Stackdriver) rather than storing them.
    resources:
      publishToStackdriver:
        type: gcp:dataloss:PreventionJobTrigger
        name: publish_to_stackdriver
        properties:
          parent: projects/my-project-name
          description: Description for the job_trigger created by terraform
          displayName: TerraformDisplayName
          triggers:
            - schedule:
                recurrencePeriodDuration: 86400s # run every 24 hours
          inspectJob:
            inspectTemplateName: sample-inspect-template
            actions:
              - publishToStackdriver: {} # empty object enables the action
            storageConfig:
              cloudStorageOptions:
                fileSet:
                  url: gs://mybucket/directory/

    

    Dlp Job Trigger With Id

    import * as pulumi from "@pulumi/pulumi";
    import * as gcp from "@pulumi/gcp";

    // Job trigger created with an explicit triggerId instead of a
    // server-generated one.
    const withTriggerId = new gcp.dataloss.PreventionJobTrigger("with_trigger_id", {
        parent: "projects/my-project-name",
        description: "Starting description",
        displayName: "display",
        triggerId: "id-", // caller-chosen trigger id
        triggers: [{
            schedule: {
                recurrencePeriodDuration: "86400s", // run every 24 hours
            },
        }],
        inspectJob: {
            inspectTemplateName: "fake",
            actions: [{
                saveFindings: {
                    outputConfig: {
                        table: {
                            projectId: "project",
                            datasetId: "dataset123",
                        },
                    },
                },
            }],
            storageConfig: {
                cloudStorageOptions: {
                    fileSet: {
                        url: "gs://mybucket/directory/",
                    },
                },
            },
        },
    });
    
    import pulumi
    import pulumi_gcp as gcp

    # Job trigger created with an explicit trigger_id instead of a
    # server-generated one.
    with_trigger_id = gcp.dataloss.PreventionJobTrigger("with_trigger_id",
        parent="projects/my-project-name",
        description="Starting description",
        display_name="display",
        trigger_id="id-",  # caller-chosen trigger id
        triggers=[gcp.dataloss.PreventionJobTriggerTriggerArgs(
            schedule=gcp.dataloss.PreventionJobTriggerTriggerScheduleArgs(
                recurrence_period_duration="86400s",  # run every 24 hours
            ),
        )],
        inspect_job=gcp.dataloss.PreventionJobTriggerInspectJobArgs(
            inspect_template_name="fake",
            actions=[gcp.dataloss.PreventionJobTriggerInspectJobActionArgs(
                save_findings=gcp.dataloss.PreventionJobTriggerInspectJobActionSaveFindingsArgs(
                    output_config=gcp.dataloss.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs(
                        table=gcp.dataloss.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs(
                            project_id="project",
                            dataset_id="dataset123",
                        ),
                    ),
                ),
            )],
            storage_config=gcp.dataloss.PreventionJobTriggerInspectJobStorageConfigArgs(
                cloud_storage_options=gcp.dataloss.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs(
                    file_set=gcp.dataloss.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs(
                        url="gs://mybucket/directory/",
                    ),
                ),
            ),
        ))
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/dataloss"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		// Job trigger created with an explicit TriggerId instead of a
    		// server-generated one.
    		_, err := dataloss.NewPreventionJobTrigger(ctx, "with_trigger_id", &dataloss.PreventionJobTriggerArgs{
    			Parent:      pulumi.String("projects/my-project-name"),
    			Description: pulumi.String("Starting description"),
    			DisplayName: pulumi.String("display"),
    			TriggerId:   pulumi.String("id-"), // caller-chosen trigger id
    			Triggers: dataloss.PreventionJobTriggerTriggerArray{
    				&dataloss.PreventionJobTriggerTriggerArgs{
    					Schedule: &dataloss.PreventionJobTriggerTriggerScheduleArgs{
    						RecurrencePeriodDuration: pulumi.String("86400s"), // run every 24 hours
    					},
    				},
    			},
    			InspectJob: &dataloss.PreventionJobTriggerInspectJobArgs{
    				InspectTemplateName: pulumi.String("fake"),
    				Actions: dataloss.PreventionJobTriggerInspectJobActionArray{
    					&dataloss.PreventionJobTriggerInspectJobActionArgs{
    						SaveFindings: &dataloss.PreventionJobTriggerInspectJobActionSaveFindingsArgs{
    							OutputConfig: &dataloss.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs{
    								Table: &dataloss.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs{
    									ProjectId: pulumi.String("project"),
    									DatasetId: pulumi.String("dataset123"),
    								},
    							},
    						},
    					},
    				},
    				StorageConfig: &dataloss.PreventionJobTriggerInspectJobStorageConfigArgs{
    					CloudStorageOptions: &dataloss.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs{
    						FileSet: &dataloss.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs{
    							Url: pulumi.String("gs://mybucket/directory/"),
    						},
    					},
    				},
    			},
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Gcp = Pulumi.Gcp;
    
    return await Deployment.RunAsync(() => 
    {
        // Job trigger created with an explicit TriggerId instead of a
        // server-generated one.
        var withTriggerId = new Gcp.DataLoss.PreventionJobTrigger("with_trigger_id", new()
        {
            Parent = "projects/my-project-name",
            Description = "Starting description",
            DisplayName = "display",
            TriggerId = "id-", // caller-chosen trigger id
            Triggers = new[]
            {
                new Gcp.DataLoss.Inputs.PreventionJobTriggerTriggerArgs
                {
                    Schedule = new Gcp.DataLoss.Inputs.PreventionJobTriggerTriggerScheduleArgs
                    {
                        RecurrencePeriodDuration = "86400s", // run every 24 hours
                    },
                },
            },
            InspectJob = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobArgs
            {
                InspectTemplateName = "fake",
                Actions = new[]
                {
                    new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionArgs
                    {
                        SaveFindings = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionSaveFindingsArgs
                        {
                            OutputConfig = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs
                            {
                                Table = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs
                                {
                                    ProjectId = "project",
                                    DatasetId = "dataset123",
                                },
                            },
                        },
                    },
                },
                StorageConfig = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobStorageConfigArgs
                {
                    CloudStorageOptions = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs
                    {
                        FileSet = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs
                        {
                            Url = "gs://mybucket/directory/",
                        },
                    },
                },
            },
        });
    
    });
    
    package generated_program;

    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.gcp.dataloss.PreventionJobTrigger;
    import com.pulumi.gcp.dataloss.PreventionJobTriggerArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerTriggerArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerTriggerScheduleArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobActionArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobActionSaveFindingsArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;

    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }

        // Job trigger created with an explicit triggerId instead of a
        // server-generated one.
        public static void stack(Context ctx) {
            var withTriggerId = new PreventionJobTrigger("withTriggerId", PreventionJobTriggerArgs.builder()
                .parent("projects/my-project-name")
                .description("Starting description")
                .displayName("display")
                .triggerId("id-") // caller-chosen trigger id
                .triggers(PreventionJobTriggerTriggerArgs.builder()
                    .schedule(PreventionJobTriggerTriggerScheduleArgs.builder()
                        .recurrencePeriodDuration("86400s")
                        .build())
                    .build())
                .inspectJob(PreventionJobTriggerInspectJobArgs.builder()
                    .inspectTemplateName("fake")
                    .actions(PreventionJobTriggerInspectJobActionArgs.builder()
                        .saveFindings(PreventionJobTriggerInspectJobActionSaveFindingsArgs.builder()
                            .outputConfig(PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs.builder()
                                .table(PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs.builder()
                                    .projectId("project")
                                    .datasetId("dataset123")
                                    .build())
                                .build())
                            .build())
                        .build())
                    .storageConfig(PreventionJobTriggerInspectJobStorageConfigArgs.builder()
                        .cloudStorageOptions(PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs.builder()
                            .fileSet(PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs.builder()
                                .url("gs://mybucket/directory/")
                                .build())
                            .build())
                        .build())
                    .build())
                .build());

        }
    }
    
    # DLP job trigger with a caller-supplied trigger id.
    resources:
      withTriggerId:
        type: gcp:dataloss:PreventionJobTrigger
        name: with_trigger_id
        properties:
          parent: projects/my-project-name
          description: Starting description
          displayName: display
          triggerId: id-
          triggers:
            # Fire the job once every 24 hours.
            - schedule:
                recurrencePeriodDuration: 86400s
          inspectJob:
            inspectTemplateName: fake
            actions:
              # Persist findings to a BigQuery table.
              - saveFindings:
                  outputConfig:
                    table:
                      projectId: project
                      datasetId: dataset123
            storageConfig:
              # Inspect objects under this Cloud Storage prefix.
              cloudStorageOptions:
                fileSet:
                  url: gs://mybucket/directory/
    

    Dlp Job Trigger Multiple Actions

    import * as pulumi from "@pulumi/pulumi";
    import * as gcp from "@pulumi/gcp";
    
    // A daily (86400s) DLP job trigger with two actions: save findings to a
    // BigQuery table and publish a notification to a Pub/Sub topic.
    const dailyTrigger = {
        schedule: {
            recurrencePeriodDuration: "86400s",
        },
    };
    // Action 1: persist findings to BigQuery.
    const saveFindingsAction = {
        saveFindings: {
            outputConfig: {
                table: {
                    projectId: "project",
                    datasetId: "dataset",
                },
            },
        },
    };
    // Action 2: notify a Pub/Sub topic when the job completes.
    const pubSubAction = {
        pubSub: {
            topic: "projects/project/topics/topic-name",
        },
    };
    const basic = new gcp.dataloss.PreventionJobTrigger("basic", {
        parent: "projects/my-project-name",
        description: "Description",
        displayName: "Displayname",
        triggers: [dailyTrigger],
        inspectJob: {
            inspectTemplateName: "fake",
            actions: [saveFindingsAction, pubSubAction],
            // Inspect objects under this Cloud Storage prefix.
            storageConfig: {
                cloudStorageOptions: {
                    fileSet: {
                        url: "gs://mybucket/directory/",
                    },
                },
            },
        },
    });
    
    import pulumi
    import pulumi_gcp as gcp
    
    # A daily (86400s) DLP job trigger with two actions: save findings to
    # BigQuery and publish a notification to a Pub/Sub topic.
    basic = gcp.dataloss.PreventionJobTrigger("basic",
        parent="projects/my-project-name",
        description="Description",
        display_name="Displayname",
        # Fire the job once every 24 hours.
        triggers=[gcp.dataloss.PreventionJobTriggerTriggerArgs(
            schedule=gcp.dataloss.PreventionJobTriggerTriggerScheduleArgs(
                recurrence_period_duration="86400s",
            ),
        )],
        inspect_job=gcp.dataloss.PreventionJobTriggerInspectJobArgs(
            inspect_template_name="fake",
            actions=[
                # Action 1: persist findings to a BigQuery table.
                gcp.dataloss.PreventionJobTriggerInspectJobActionArgs(
                    save_findings=gcp.dataloss.PreventionJobTriggerInspectJobActionSaveFindingsArgs(
                        output_config=gcp.dataloss.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs(
                            table=gcp.dataloss.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs(
                                project_id="project",
                                dataset_id="dataset",
                            ),
                        ),
                    ),
                ),
                # Action 2: notify a Pub/Sub topic.
                gcp.dataloss.PreventionJobTriggerInspectJobActionArgs(
                    pub_sub=gcp.dataloss.PreventionJobTriggerInspectJobActionPubSubArgs(
                        topic="projects/project/topics/topic-name",
                    ),
                ),
            ],
            # Inspect objects under this Cloud Storage prefix.
            storage_config=gcp.dataloss.PreventionJobTriggerInspectJobStorageConfigArgs(
                cloud_storage_options=gcp.dataloss.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs(
                    file_set=gcp.dataloss.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs(
                        url="gs://mybucket/directory/",
                    ),
                ),
            ),
        ))
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/dataloss"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    // Creates a daily (86400s) DLP job trigger with two actions: save findings
    // to a BigQuery table and publish a notification to a Pub/Sub topic.
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		_, err := dataloss.NewPreventionJobTrigger(ctx, "basic", &dataloss.PreventionJobTriggerArgs{
    			Parent:      pulumi.String("projects/my-project-name"),
    			Description: pulumi.String("Description"),
    			DisplayName: pulumi.String("Displayname"),
    			// Fire the job once every 24 hours.
    			Triggers: dataloss.PreventionJobTriggerTriggerArray{
    				&dataloss.PreventionJobTriggerTriggerArgs{
    					Schedule: &dataloss.PreventionJobTriggerTriggerScheduleArgs{
    						RecurrencePeriodDuration: pulumi.String("86400s"),
    					},
    				},
    			},
    			InspectJob: &dataloss.PreventionJobTriggerInspectJobArgs{
    				InspectTemplateName: pulumi.String("fake"),
    				Actions: dataloss.PreventionJobTriggerInspectJobActionArray{
    					// Action 1: persist findings to a BigQuery table.
    					&dataloss.PreventionJobTriggerInspectJobActionArgs{
    						SaveFindings: &dataloss.PreventionJobTriggerInspectJobActionSaveFindingsArgs{
    							OutputConfig: &dataloss.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs{
    								Table: &dataloss.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs{
    									ProjectId: pulumi.String("project"),
    									DatasetId: pulumi.String("dataset"),
    								},
    							},
    						},
    					},
    					// Action 2: notify a Pub/Sub topic.
    					&dataloss.PreventionJobTriggerInspectJobActionArgs{
    						PubSub: &dataloss.PreventionJobTriggerInspectJobActionPubSubArgs{
    							Topic: pulumi.String("projects/project/topics/topic-name"),
    						},
    					},
    				},
    				// Inspect objects under this Cloud Storage prefix.
    				StorageConfig: &dataloss.PreventionJobTriggerInspectJobStorageConfigArgs{
    					CloudStorageOptions: &dataloss.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs{
    						FileSet: &dataloss.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs{
    							Url: pulumi.String("gs://mybucket/directory/"),
    						},
    					},
    				},
    			},
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Gcp = Pulumi.Gcp;
    
    // Creates a daily (86400s) DLP job trigger with two actions: save findings
    // to a BigQuery table and publish a notification to a Pub/Sub topic.
    return await Deployment.RunAsync(() => 
    {
        var basic = new Gcp.DataLoss.PreventionJobTrigger("basic", new()
        {
            Parent = "projects/my-project-name",
            Description = "Description",
            DisplayName = "Displayname",
            // Fire the job once every 24 hours.
            Triggers = new[]
            {
                new Gcp.DataLoss.Inputs.PreventionJobTriggerTriggerArgs
                {
                    Schedule = new Gcp.DataLoss.Inputs.PreventionJobTriggerTriggerScheduleArgs
                    {
                        RecurrencePeriodDuration = "86400s",
                    },
                },
            },
            InspectJob = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobArgs
            {
                InspectTemplateName = "fake",
                Actions = new[]
                {
                    // Action 1: persist findings to a BigQuery table.
                    new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionArgs
                    {
                        SaveFindings = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionSaveFindingsArgs
                        {
                            OutputConfig = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs
                            {
                                Table = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs
                                {
                                    ProjectId = "project",
                                    DatasetId = "dataset",
                                },
                            },
                        },
                    },
                    // Action 2: notify a Pub/Sub topic.
                    new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionArgs
                    {
                        PubSub = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionPubSubArgs
                        {
                            Topic = "projects/project/topics/topic-name",
                        },
                    },
                },
                // Inspect objects under this Cloud Storage prefix.
                StorageConfig = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobStorageConfigArgs
                {
                    CloudStorageOptions = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs
                    {
                        FileSet = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs
                        {
                            Url = "gs://mybucket/directory/",
                        },
                    },
                },
            },
        });
    
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.gcp.dataloss.PreventionJobTrigger;
    import com.pulumi.gcp.dataloss.PreventionJobTriggerArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerTriggerArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerTriggerScheduleArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobArgs;
    // The following action-related input classes are used below but were missing
    // from the import list, so the example did not compile.
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobActionArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobActionSaveFindingsArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobActionPubSubArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    /**
     * Creates a daily (86400s) DLP job trigger with two actions: save findings
     * to a BigQuery table and publish a notification to a Pub/Sub topic.
     */
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var basic = new PreventionJobTrigger("basic", PreventionJobTriggerArgs.builder()
                .parent("projects/my-project-name")
                .description("Description")
                .displayName("Displayname")
                // Fire the job once every 24 hours.
                .triggers(PreventionJobTriggerTriggerArgs.builder()
                    .schedule(PreventionJobTriggerTriggerScheduleArgs.builder()
                        .recurrencePeriodDuration("86400s")
                        .build())
                    .build())
                .inspectJob(PreventionJobTriggerInspectJobArgs.builder()
                    .inspectTemplateName("fake")
                    .actions(
                        // Action 1: persist findings to a BigQuery table.
                        PreventionJobTriggerInspectJobActionArgs.builder()
                            .saveFindings(PreventionJobTriggerInspectJobActionSaveFindingsArgs.builder()
                                .outputConfig(PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs.builder()
                                    .table(PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs.builder()
                                        .projectId("project")
                                        .datasetId("dataset")
                                        .build())
                                    .build())
                                .build())
                            .build(),
                        // Action 2: notify a Pub/Sub topic.
                        PreventionJobTriggerInspectJobActionArgs.builder()
                            .pubSub(PreventionJobTriggerInspectJobActionPubSubArgs.builder()
                                .topic("projects/project/topics/topic-name")
                                .build())
                            .build())
                    // Inspect objects under this Cloud Storage prefix.
                    .storageConfig(PreventionJobTriggerInspectJobStorageConfigArgs.builder()
                        .cloudStorageOptions(PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs.builder()
                            .fileSet(PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs.builder()
                                .url("gs://mybucket/directory/")
                                .build())
                            .build())
                        .build())
                    .build())
                .build());
    
        }
    }
    
    # Daily DLP job trigger with two actions: BigQuery save-findings and a
    # Pub/Sub notification.
    resources:
      basic:
        type: gcp:dataloss:PreventionJobTrigger
        properties:
          parent: projects/my-project-name
          description: Description
          displayName: Displayname
          triggers:
            # Fire the job once every 24 hours.
            - schedule:
                recurrencePeriodDuration: 86400s
          inspectJob:
            inspectTemplateName: fake
            actions:
              # Action 1: persist findings to a BigQuery table.
              - saveFindings:
                  outputConfig:
                    table:
                      projectId: project
                      datasetId: dataset
              # Action 2: notify a Pub/Sub topic.
              - pubSub:
                  topic: projects/project/topics/topic-name
            storageConfig:
              # Inspect objects under this Cloud Storage prefix.
              cloudStorageOptions:
                fileSet:
                  url: gs://mybucket/directory/
    

    Dlp Job Trigger Cloud Storage Optional Timespan Autopopulation

    import * as pulumi from "@pulumi/pulumi";
    import * as gcp from "@pulumi/gcp";
    
    // Daily DLP job trigger over Cloud Storage; timespanConfig lets the service
    // auto-populate the scan window so only new/changed objects are inspected.
    const basic = new gcp.dataloss.PreventionJobTrigger("basic", {
        parent: "projects/my-project-name",
        description: "Description",
        displayName: "Displayname",
        // Fire the job once every 24 hours.
        triggers: [{
            schedule: {
                recurrencePeriodDuration: "86400s",
            },
        }],
        inspectJob: {
            inspectTemplateName: "fake",
            // Persist findings to a BigQuery table.
            actions: [{
                saveFindings: {
                    outputConfig: {
                        table: {
                            projectId: "project",
                            datasetId: "dataset",
                        },
                    },
                },
            }],
            storageConfig: {
                timespanConfig: {
                    enableAutoPopulationOfTimespanConfig: true,
                },
                cloudStorageOptions: {
                    fileSet: {
                        url: "gs://mybucket/directory/",
                    },
                },
            },
        },
    });
    
    import pulumi
    import pulumi_gcp as gcp
    
    # Daily DLP job trigger over Cloud Storage; timespan_config lets the service
    # auto-populate the scan window so only new/changed objects are inspected.
    basic = gcp.dataloss.PreventionJobTrigger("basic",
        parent="projects/my-project-name",
        description="Description",
        display_name="Displayname",
        # Fire the job once every 24 hours.
        triggers=[gcp.dataloss.PreventionJobTriggerTriggerArgs(
            schedule=gcp.dataloss.PreventionJobTriggerTriggerScheduleArgs(
                recurrence_period_duration="86400s",
            ),
        )],
        inspect_job=gcp.dataloss.PreventionJobTriggerInspectJobArgs(
            inspect_template_name="fake",
            # Persist findings to a BigQuery table.
            actions=[gcp.dataloss.PreventionJobTriggerInspectJobActionArgs(
                save_findings=gcp.dataloss.PreventionJobTriggerInspectJobActionSaveFindingsArgs(
                    output_config=gcp.dataloss.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs(
                        table=gcp.dataloss.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs(
                            project_id="project",
                            dataset_id="dataset",
                        ),
                    ),
                ),
            )],
            storage_config=gcp.dataloss.PreventionJobTriggerInspectJobStorageConfigArgs(
                timespan_config=gcp.dataloss.PreventionJobTriggerInspectJobStorageConfigTimespanConfigArgs(
                    enable_auto_population_of_timespan_config=True,
                ),
                cloud_storage_options=gcp.dataloss.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs(
                    file_set=gcp.dataloss.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs(
                        url="gs://mybucket/directory/",
                    ),
                ),
            ),
        ))
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/dataloss"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    // Creates a daily (86400s) DLP job trigger over Cloud Storage; TimespanConfig
    // lets the service auto-populate the scan window for incremental scans.
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		_, err := dataloss.NewPreventionJobTrigger(ctx, "basic", &dataloss.PreventionJobTriggerArgs{
    			Parent:      pulumi.String("projects/my-project-name"),
    			Description: pulumi.String("Description"),
    			DisplayName: pulumi.String("Displayname"),
    			// Fire the job once every 24 hours.
    			Triggers: dataloss.PreventionJobTriggerTriggerArray{
    				&dataloss.PreventionJobTriggerTriggerArgs{
    					Schedule: &dataloss.PreventionJobTriggerTriggerScheduleArgs{
    						RecurrencePeriodDuration: pulumi.String("86400s"),
    					},
    				},
    			},
    			InspectJob: &dataloss.PreventionJobTriggerInspectJobArgs{
    				InspectTemplateName: pulumi.String("fake"),
    				// Persist findings to a BigQuery table.
    				Actions: dataloss.PreventionJobTriggerInspectJobActionArray{
    					&dataloss.PreventionJobTriggerInspectJobActionArgs{
    						SaveFindings: &dataloss.PreventionJobTriggerInspectJobActionSaveFindingsArgs{
    							OutputConfig: &dataloss.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs{
    								Table: &dataloss.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs{
    									ProjectId: pulumi.String("project"),
    									DatasetId: pulumi.String("dataset"),
    								},
    							},
    						},
    					},
    				},
    				StorageConfig: &dataloss.PreventionJobTriggerInspectJobStorageConfigArgs{
    					TimespanConfig: &dataloss.PreventionJobTriggerInspectJobStorageConfigTimespanConfigArgs{
    						EnableAutoPopulationOfTimespanConfig: pulumi.Bool(true),
    					},
    					CloudStorageOptions: &dataloss.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs{
    						FileSet: &dataloss.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs{
    							Url: pulumi.String("gs://mybucket/directory/"),
    						},
    					},
    				},
    			},
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Gcp = Pulumi.Gcp;
    
    // Creates a daily (86400s) DLP job trigger over Cloud Storage; TimespanConfig
    // lets the service auto-populate the scan window for incremental scans.
    return await Deployment.RunAsync(() => 
    {
        var basic = new Gcp.DataLoss.PreventionJobTrigger("basic", new()
        {
            Parent = "projects/my-project-name",
            Description = "Description",
            DisplayName = "Displayname",
            // Fire the job once every 24 hours.
            Triggers = new[]
            {
                new Gcp.DataLoss.Inputs.PreventionJobTriggerTriggerArgs
                {
                    Schedule = new Gcp.DataLoss.Inputs.PreventionJobTriggerTriggerScheduleArgs
                    {
                        RecurrencePeriodDuration = "86400s",
                    },
                },
            },
            InspectJob = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobArgs
            {
                InspectTemplateName = "fake",
                // Persist findings to a BigQuery table.
                Actions = new[]
                {
                    new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionArgs
                    {
                        SaveFindings = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionSaveFindingsArgs
                        {
                            OutputConfig = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs
                            {
                                Table = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs
                                {
                                    ProjectId = "project",
                                    DatasetId = "dataset",
                                },
                            },
                        },
                    },
                },
                StorageConfig = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobStorageConfigArgs
                {
                    TimespanConfig = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobStorageConfigTimespanConfigArgs
                    {
                        EnableAutoPopulationOfTimespanConfig = true,
                    },
                    CloudStorageOptions = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs
                    {
                        FileSet = new Gcp.DataLoss.Inputs.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs
                        {
                            Url = "gs://mybucket/directory/",
                        },
                    },
                },
            },
        });
    
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.gcp.dataloss.PreventionJobTrigger;
    import com.pulumi.gcp.dataloss.PreventionJobTriggerArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerTriggerArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerTriggerScheduleArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobArgs;
    // The following action-related input classes are used below but were missing
    // from the import list, so the example did not compile.
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobActionArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobActionSaveFindingsArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigTimespanConfigArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs;
    import com.pulumi.gcp.dataloss.inputs.PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    /**
     * Creates a daily (86400s) DLP job trigger over Cloud Storage; the timespan
     * config lets the service auto-populate the scan window for incremental scans.
     */
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var basic = new PreventionJobTrigger("basic", PreventionJobTriggerArgs.builder()
                .parent("projects/my-project-name")
                .description("Description")
                .displayName("Displayname")
                // Fire the job once every 24 hours.
                .triggers(PreventionJobTriggerTriggerArgs.builder()
                    .schedule(PreventionJobTriggerTriggerScheduleArgs.builder()
                        .recurrencePeriodDuration("86400s")
                        .build())
                    .build())
                .inspectJob(PreventionJobTriggerInspectJobArgs.builder()
                    .inspectTemplateName("fake")
                    // Persist findings to a BigQuery table.
                    .actions(PreventionJobTriggerInspectJobActionArgs.builder()
                        .saveFindings(PreventionJobTriggerInspectJobActionSaveFindingsArgs.builder()
                            .outputConfig(PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs.builder()
                                .table(PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs.builder()
                                    .projectId("project")
                                    .datasetId("dataset")
                                    .build())
                                .build())
                            .build())
                        .build())
                    .storageConfig(PreventionJobTriggerInspectJobStorageConfigArgs.builder()
                        .timespanConfig(PreventionJobTriggerInspectJobStorageConfigTimespanConfigArgs.builder()
                            .enableAutoPopulationOfTimespanConfig(true)
                            .build())
                        .cloudStorageOptions(PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs.builder()
                            .fileSet(PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs.builder()
                                .url("gs://mybucket/directory/")
                                .build())
                            .build())
                        .build())
                    .build())
                .build());
    
        }
    }
    
    # Daily DLP job trigger over Cloud Storage; timespanConfig lets the service
    # auto-populate the scan window so only new/changed objects are inspected.
    resources:
      basic:
        type: gcp:dataloss:PreventionJobTrigger
        properties:
          parent: projects/my-project-name
          description: Description
          displayName: Displayname
          triggers:
            # Fire the job once every 24 hours.
            - schedule:
                recurrencePeriodDuration: 86400s
          inspectJob:
            inspectTemplateName: fake
            actions:
              # Persist findings to a BigQuery table.
              - saveFindings:
                  outputConfig:
                    table:
                      projectId: project
                      datasetId: dataset
            storageConfig:
              timespanConfig:
                enableAutoPopulationOfTimespanConfig: true
              cloudStorageOptions:
                fileSet:
                  url: gs://mybucket/directory/
    

    Create PreventionJobTrigger Resource

    new PreventionJobTrigger(name: string, args: PreventionJobTriggerArgs, opts?: CustomResourceOptions);
    @overload
    def PreventionJobTrigger(resource_name: str,
                             opts: Optional[ResourceOptions] = None,
                             description: Optional[str] = None,
                             display_name: Optional[str] = None,
                             inspect_job: Optional[PreventionJobTriggerInspectJobArgs] = None,
                             parent: Optional[str] = None,
                             status: Optional[str] = None,
                             trigger_id: Optional[str] = None,
                             triggers: Optional[Sequence[PreventionJobTriggerTriggerArgs]] = None)
    @overload
    def PreventionJobTrigger(resource_name: str,
                             args: PreventionJobTriggerArgs,
                             opts: Optional[ResourceOptions] = None)
    func NewPreventionJobTrigger(ctx *Context, name string, args PreventionJobTriggerArgs, opts ...ResourceOption) (*PreventionJobTrigger, error)
    public PreventionJobTrigger(string name, PreventionJobTriggerArgs args, CustomResourceOptions? opts = null)
    public PreventionJobTrigger(String name, PreventionJobTriggerArgs args)
    public PreventionJobTrigger(String name, PreventionJobTriggerArgs args, CustomResourceOptions options)
    
    type: gcp:dataloss:PreventionJobTrigger
    properties: # The arguments to resource properties.
    options: # Bag of options to control resource's behavior.
    
    
    name string
    The unique name of the resource.
    args PreventionJobTriggerArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    resource_name str
    The unique name of the resource.
    args PreventionJobTriggerArgs
    The arguments to resource properties.
    opts ResourceOptions
    Bag of options to control resource's behavior.
    ctx Context
    Context object for the current deployment.
    name string
    The unique name of the resource.
    args PreventionJobTriggerArgs
    The arguments to resource properties.
    opts ResourceOption
    Bag of options to control resource's behavior.
    name string
    The unique name of the resource.
    args PreventionJobTriggerArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    name String
    The unique name of the resource.
    args PreventionJobTriggerArgs
    The arguments to resource properties.
    options CustomResourceOptions
    Bag of options to control resource's behavior.

    PreventionJobTrigger Resource Properties

    To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

    Inputs

    The PreventionJobTrigger resource accepts the following input properties:

    Parent string
    The parent of the trigger, either in the format projects/{{project}} or projects/{{project}}/locations/{{location}}
    Triggers List<PreventionJobTriggerTrigger>
    What event needs to occur for a new job to be started. Structure is documented below.
    Description string
    A short description of where the data is coming from. Will be stored once in the job. 256 max length.
    DisplayName string
    User set display name of the job trigger.
    InspectJob PreventionJobTriggerInspectJob
    Controls what and how to inspect for findings. Structure is documented below.
    Status string
    Whether the trigger is currently active. Default value is HEALTHY. Possible values are: PAUSED, HEALTHY, CANCELLED.
    TriggerId string
    The trigger id can contain uppercase and lowercase letters, numbers, hyphens, and underscores; that is, it must match the regular expression: [a-zA-Z\d-_]+. The maximum length is 100 characters. Can be empty to allow the system to generate one.
    Parent string
    The parent of the trigger, either in the format projects/{{project}} or projects/{{project}}/locations/{{location}}
    Triggers []PreventionJobTriggerTriggerArgs
    What event needs to occur for a new job to be started. Structure is documented below.
    Description string
    A short description of where the data is coming from. Will be stored once in the job. 256 max length.
    DisplayName string
    User set display name of the job trigger.
    InspectJob PreventionJobTriggerInspectJobArgs
    Controls what and how to inspect for findings. Structure is documented below.
    Status string
    Whether the trigger is currently active. Default value is HEALTHY. Possible values are: PAUSED, HEALTHY, CANCELLED.
    TriggerId string
    The trigger id can contain uppercase and lowercase letters, numbers, hyphens, and underscores; that is, it must match the regular expression: [a-zA-Z\d-_]+. The maximum length is 100 characters. Can be empty to allow the system to generate one.
    parent String
    The parent of the trigger, either in the format projects/{{project}} or projects/{{project}}/locations/{{location}}
    triggers List<PreventionJobTriggerTrigger>
    What event needs to occur for a new job to be started. Structure is documented below.
    description String
    A short description of where the data is coming from. Will be stored once in the job. 256 max length.
    displayName String
    User set display name of the job trigger.
    inspectJob PreventionJobTriggerInspectJob
    Controls what and how to inspect for findings. Structure is documented below.
    status String
    Whether the trigger is currently active. Default value is HEALTHY. Possible values are: PAUSED, HEALTHY, CANCELLED.
    triggerId String
    The trigger id can contain uppercase and lowercase letters, numbers, hyphens, and underscores; that is, it must match the regular expression: [a-zA-Z\d-_]+. The maximum length is 100 characters. Can be empty to allow the system to generate one.
    parent string
    The parent of the trigger, either in the format projects/{{project}} or projects/{{project}}/locations/{{location}}
    triggers PreventionJobTriggerTrigger[]
    What event needs to occur for a new job to be started. Structure is documented below.
    description string
    A short description of where the data is coming from. Will be stored once in the job. 256 max length.
    displayName string
    User set display name of the job trigger.
    inspectJob PreventionJobTriggerInspectJob
    Controls what and how to inspect for findings. Structure is documented below.
    status string
    Whether the trigger is currently active. Default value is HEALTHY. Possible values are: PAUSED, HEALTHY, CANCELLED.
    triggerId string
    The trigger id can contain uppercase and lowercase letters, numbers, hyphens, and underscores; that is, it must match the regular expression: [a-zA-Z\d-_]+. The maximum length is 100 characters. Can be empty to allow the system to generate one.
    parent str
    The parent of the trigger, either in the format projects/{{project}} or projects/{{project}}/locations/{{location}}
    triggers Sequence[PreventionJobTriggerTriggerArgs]
    What event needs to occur for a new job to be started. Structure is documented below.
    description str
    A short description of where the data is coming from. Will be stored once in the job. 256 max length.
    display_name str
    User set display name of the job trigger.
    inspect_job PreventionJobTriggerInspectJobArgs
    Controls what and how to inspect for findings. Structure is documented below.
    status str
    Whether the trigger is currently active. Default value is HEALTHY. Possible values are: PAUSED, HEALTHY, CANCELLED.
    trigger_id str
    The trigger id can contain uppercase and lowercase letters, numbers, hyphens, and underscores; that is, it must match the regular expression: [a-zA-Z\d-_]+. The maximum length is 100 characters. Can be empty to allow the system to generate one.
    parent String
    The parent of the trigger, either in the format projects/{{project}} or projects/{{project}}/locations/{{location}}
    triggers List<Property Map>
    What event needs to occur for a new job to be started. Structure is documented below.
    description String
    A short description of where the data is coming from. Will be stored once in the job. 256 max length.
    displayName String
    User set display name of the job trigger.
    inspectJob Property Map
    Controls what and how to inspect for findings. Structure is documented below.
    status String
    Whether the trigger is currently active. Default value is HEALTHY. Possible values are: PAUSED, HEALTHY, CANCELLED.
    triggerId String
    The trigger id can contain uppercase and lowercase letters, numbers, hyphens, and underscores; that is, it must match the regular expression: [a-zA-Z\d-_]+. The maximum length is 100 characters. Can be empty to allow the system to generate one.

    Outputs

    All input properties are implicitly available as output properties. Additionally, the PreventionJobTrigger resource produces the following output properties:

    CreateTime string
    (Output) The creation timestamp of the job trigger. Set by the server.
    Id string
    The provider-assigned unique ID for this managed resource.
    LastRunTime string
    The timestamp of the last time this trigger executed.
    Name string
    The resource name of the job trigger. Set by the server.
    UpdateTime string
    The last update timestamp of the job trigger. Set by the server.
    CreateTime string
    (Output) The creation timestamp of the job trigger. Set by the server.
    Id string
    The provider-assigned unique ID for this managed resource.
    LastRunTime string
    The timestamp of the last time this trigger executed.
    Name string
    The resource name of the job trigger. Set by the server.
    UpdateTime string
    The last update timestamp of the job trigger. Set by the server.
    createTime String
    (Output) The creation timestamp of the job trigger. Set by the server.
    id String
    The provider-assigned unique ID for this managed resource.
    lastRunTime String
    The timestamp of the last time this trigger executed.
    name String
    The resource name of the job trigger. Set by the server.
    updateTime String
    The last update timestamp of the job trigger. Set by the server.
    createTime string
    (Output) The creation timestamp of the job trigger. Set by the server.
    id string
    The provider-assigned unique ID for this managed resource.
    lastRunTime string
    The timestamp of the last time this trigger executed.
    name string
    The resource name of the job trigger. Set by the server.
    updateTime string
    The last update timestamp of the job trigger. Set by the server.
    create_time str
    (Output) The creation timestamp of the job trigger. Set by the server.
    id str
    The provider-assigned unique ID for this managed resource.
    last_run_time str
    The timestamp of the last time this trigger executed.
    name str
    The resource name of the job trigger. Set by the server.
    update_time str
    The last update timestamp of the job trigger. Set by the server.
    createTime String
    (Output) The creation timestamp of the job trigger. Set by the server.
    id String
    The provider-assigned unique ID for this managed resource.
    lastRunTime String
    The timestamp of the last time this trigger executed.
    name String
    The resource name of the job trigger. Set by the server.
    updateTime String
    The last update timestamp of the job trigger. Set by the server.

    Look up Existing PreventionJobTrigger Resource

    Get an existing PreventionJobTrigger resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

    public static get(name: string, id: Input<ID>, state?: PreventionJobTriggerState, opts?: CustomResourceOptions): PreventionJobTrigger
    @staticmethod
    def get(resource_name: str,
            id: str,
            opts: Optional[ResourceOptions] = None,
            create_time: Optional[str] = None,
            description: Optional[str] = None,
            display_name: Optional[str] = None,
            inspect_job: Optional[PreventionJobTriggerInspectJobArgs] = None,
            last_run_time: Optional[str] = None,
            name: Optional[str] = None,
            parent: Optional[str] = None,
            status: Optional[str] = None,
            trigger_id: Optional[str] = None,
            triggers: Optional[Sequence[PreventionJobTriggerTriggerArgs]] = None,
            update_time: Optional[str] = None) -> PreventionJobTrigger
    func GetPreventionJobTrigger(ctx *Context, name string, id IDInput, state *PreventionJobTriggerState, opts ...ResourceOption) (*PreventionJobTrigger, error)
    public static PreventionJobTrigger Get(string name, Input<string> id, PreventionJobTriggerState? state, CustomResourceOptions? opts = null)
    public static PreventionJobTrigger get(String name, Output<String> id, PreventionJobTriggerState state, CustomResourceOptions options)
    Resource lookup is not supported in YAML
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    resource_name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    The following state arguments are supported:
    CreateTime string
    (Output) The creation timestamp of the job trigger. Set by the server.
    Description string
    A short description of where the data is coming from. Will be stored once in the job. 256 max length.
    DisplayName string
    User set display name of the job trigger.
    InspectJob PreventionJobTriggerInspectJob
    Controls what and how to inspect for findings. Structure is documented below.
    LastRunTime string
    The timestamp of the last time this trigger executed.
    Name string
    The resource name of the job trigger. Set by the server.
    Parent string
    The parent of the trigger, either in the format projects/{{project}} or projects/{{project}}/locations/{{location}}
    Status string
    Whether the trigger is currently active. Default value is HEALTHY. Possible values are: PAUSED, HEALTHY, CANCELLED.
    TriggerId string
    The trigger id can contain uppercase and lowercase letters, numbers, hyphens, and underscores; that is, it must match the regular expression: [a-zA-Z\d-_]+. The maximum length is 100 characters. Can be empty to allow the system to generate one.
    Triggers List<PreventionJobTriggerTrigger>
    What event needs to occur for a new job to be started. Structure is documented below.
    UpdateTime string
    The last update timestamp of the job trigger. Set by the server.
    CreateTime string
    (Output) The creation timestamp of the job trigger. Set by the server.
    Description string
    A short description of where the data is coming from. Will be stored once in the job. 256 max length.
    DisplayName string
    User set display name of the job trigger.
    InspectJob PreventionJobTriggerInspectJobArgs
    Controls what and how to inspect for findings. Structure is documented below.
    LastRunTime string
    The timestamp of the last time this trigger executed.
    Name string
    The resource name of the job trigger. Set by the server.
    Parent string
    The parent of the trigger, either in the format projects/{{project}} or projects/{{project}}/locations/{{location}}
    Status string
    Whether the trigger is currently active. Default value is HEALTHY. Possible values are: PAUSED, HEALTHY, CANCELLED.
    TriggerId string
    The trigger id can contain uppercase and lowercase letters, numbers, hyphens, and underscores; that is, it must match the regular expression: [a-zA-Z\d-_]+. The maximum length is 100 characters. Can be empty to allow the system to generate one.
    Triggers []PreventionJobTriggerTriggerArgs
    What event needs to occur for a new job to be started. Structure is documented below.
    UpdateTime string
    The last update timestamp of the job trigger. Set by the server.
    createTime String
    (Output) The creation timestamp of the job trigger. Set by the server.
    description String
    A short description of where the data is coming from. Will be stored once in the job. 256 max length.
    displayName String
    User set display name of the job trigger.
    inspectJob PreventionJobTriggerInspectJob
    Controls what and how to inspect for findings. Structure is documented below.
    lastRunTime String
    The timestamp of the last time this trigger executed.
    name String
    The resource name of the job trigger. Set by the server.
    parent String
    The parent of the trigger, either in the format projects/{{project}} or projects/{{project}}/locations/{{location}}
    status String
    Whether the trigger is currently active. Default value is HEALTHY. Possible values are: PAUSED, HEALTHY, CANCELLED.
    triggerId String
    The trigger id can contain uppercase and lowercase letters, numbers, hyphens, and underscores; that is, it must match the regular expression: [a-zA-Z\d-_]+. The maximum length is 100 characters. Can be empty to allow the system to generate one.
    triggers List<PreventionJobTriggerTrigger>
    What event needs to occur for a new job to be started. Structure is documented below.
    updateTime String
    The last update timestamp of the job trigger. Set by the server.
    createTime string
    (Output) The creation timestamp of the job trigger. Set by the server.
    description string
    A short description of where the data is coming from. Will be stored once in the job. 256 max length.
    displayName string
    User set display name of the job trigger.
    inspectJob PreventionJobTriggerInspectJob
    Controls what and how to inspect for findings. Structure is documented below.
    lastRunTime string
    The timestamp of the last time this trigger executed.
    name string
    The resource name of the job trigger. Set by the server.
    parent string
    The parent of the trigger, either in the format projects/{{project}} or projects/{{project}}/locations/{{location}}
    status string
    Whether the trigger is currently active. Default value is HEALTHY. Possible values are: PAUSED, HEALTHY, CANCELLED.
    triggerId string
    The trigger id can contain uppercase and lowercase letters, numbers, hyphens, and underscores; that is, it must match the regular expression: [a-zA-Z\d-_]+. The maximum length is 100 characters. Can be empty to allow the system to generate one.
    triggers PreventionJobTriggerTrigger[]
    What event needs to occur for a new job to be started. Structure is documented below.
    updateTime string
    The last update timestamp of the job trigger. Set by the server.
    create_time str
    (Output) The creation timestamp of the job trigger. Set by the server.
    description str
    A short description of where the data is coming from. Will be stored once in the job. 256 max length.
    display_name str
    User set display name of the job trigger.
    inspect_job PreventionJobTriggerInspectJobArgs
    Controls what and how to inspect for findings. Structure is documented below.
    last_run_time str
    The timestamp of the last time this trigger executed.
    name str
    The resource name of the job trigger. Set by the server.
    parent str
    The parent of the trigger, either in the format projects/{{project}} or projects/{{project}}/locations/{{location}}
    status str
    Whether the trigger is currently active. Default value is HEALTHY. Possible values are: PAUSED, HEALTHY, CANCELLED.
    trigger_id str
    The trigger id can contain uppercase and lowercase letters, numbers, hyphens, and underscores; that is, it must match the regular expression: [a-zA-Z\d-_]+. The maximum length is 100 characters. Can be empty to allow the system to generate one.
    triggers Sequence[PreventionJobTriggerTriggerArgs]
    What event needs to occur for a new job to be started. Structure is documented below.
    update_time str
    The last update timestamp of the job trigger. Set by the server.
    createTime String
    (Output) The creation timestamp of the job trigger. Set by the server.
    description String
    A short description of where the data is coming from. Will be stored once in the job. 256 max length.
    displayName String
    User set display name of the job trigger.
    inspectJob Property Map
    Controls what and how to inspect for findings. Structure is documented below.
    lastRunTime String
    The timestamp of the last time this trigger executed.
    name String
    The resource name of the job trigger. Set by the server.
    parent String
    The parent of the trigger, either in the format projects/{{project}} or projects/{{project}}/locations/{{location}}
    status String
    Whether the trigger is currently active. Default value is HEALTHY. Possible values are: PAUSED, HEALTHY, CANCELLED.
    triggerId String
    The trigger id can contain uppercase and lowercase letters, numbers, hyphens, and underscores; that is, it must match the regular expression: [a-zA-Z\d-_]+. The maximum length is 100 characters. Can be empty to allow the system to generate one.
    triggers List<Property Map>
    What event needs to occur for a new job to be started. Structure is documented below.
    updateTime String
    The last update timestamp of the job trigger. Set by the server.

    Supporting Types

    PreventionJobTriggerInspectJob, PreventionJobTriggerInspectJobArgs

    StorageConfig PreventionJobTriggerInspectJobStorageConfig
    Information on where to inspect. Structure is documented below.
    Actions List<PreventionJobTriggerInspectJobAction>
    Configuration block for the actions to execute on the completion of a job. Can be specified multiple times, but only one for each type. Each action block supports fields documented below. This argument is processed in attribute-as-blocks mode. Structure is documented below.
    InspectConfig PreventionJobTriggerInspectJobInspectConfig
    The core content of the template. Structure is documented below.
    InspectTemplateName string
    The name of the template to run when this job is triggered.
    StorageConfig PreventionJobTriggerInspectJobStorageConfig
    Information on where to inspect. Structure is documented below.
    Actions []PreventionJobTriggerInspectJobAction
    Configuration block for the actions to execute on the completion of a job. Can be specified multiple times, but only one for each type. Each action block supports fields documented below. This argument is processed in attribute-as-blocks mode. Structure is documented below.
    InspectConfig PreventionJobTriggerInspectJobInspectConfig
    The core content of the template. Structure is documented below.
    InspectTemplateName string
    The name of the template to run when this job is triggered.
    storageConfig PreventionJobTriggerInspectJobStorageConfig
    Information on where to inspect. Structure is documented below.
    actions List<PreventionJobTriggerInspectJobAction>
    Configuration block for the actions to execute on the completion of a job. Can be specified multiple times, but only one for each type. Each action block supports fields documented below. This argument is processed in attribute-as-blocks mode. Structure is documented below.
    inspectConfig PreventionJobTriggerInspectJobInspectConfig
    The core content of the template. Structure is documented below.
    inspectTemplateName String
    The name of the template to run when this job is triggered.
    storageConfig PreventionJobTriggerInspectJobStorageConfig
    Information on where to inspect. Structure is documented below.
    actions PreventionJobTriggerInspectJobAction[]
    Configuration block for the actions to execute on the completion of a job. Can be specified multiple times, but only one for each type. Each action block supports fields documented below. This argument is processed in attribute-as-blocks mode. Structure is documented below.
    inspectConfig PreventionJobTriggerInspectJobInspectConfig
    The core content of the template. Structure is documented below.
    inspectTemplateName string
    The name of the template to run when this job is triggered.
    storage_config PreventionJobTriggerInspectJobStorageConfig
    Information on where to inspect. Structure is documented below.
    actions Sequence[PreventionJobTriggerInspectJobAction]
    Configuration block for the actions to execute on the completion of a job. Can be specified multiple times, but only one for each type. Each action block supports fields documented below. This argument is processed in attribute-as-blocks mode. Structure is documented below.
    inspect_config PreventionJobTriggerInspectJobInspectConfig
    The core content of the template. Structure is documented below.
    inspect_template_name str
    The name of the template to run when this job is triggered.
    storageConfig Property Map
    Information on where to inspect. Structure is documented below.
    actions List<Property Map>
    Configuration block for the actions to execute on the completion of a job. Can be specified multiple times, but only one for each type. Each action block supports fields documented below. This argument is processed in attribute-as-blocks mode. Structure is documented below.
    inspectConfig Property Map
    The core content of the template. Structure is documented below.
    inspectTemplateName String
    The name of the template to run when this job is triggered.

    PreventionJobTriggerInspectJobAction, PreventionJobTriggerInspectJobActionArgs

    Deidentify PreventionJobTriggerInspectJobActionDeidentify
    Create a de-identified copy of the requested table or files. Structure is documented below.
    JobNotificationEmails PreventionJobTriggerInspectJobActionJobNotificationEmails
    Sends an email when the job completes. The email goes to IAM project owners and technical Essential Contacts.
    PubSub PreventionJobTriggerInspectJobActionPubSub
    Publish a message into a given Pub/Sub topic when the job completes. Structure is documented below.
    PublishFindingsToCloudDataCatalog PreventionJobTriggerInspectJobActionPublishFindingsToCloudDataCatalog
    Publish findings of a DlpJob to Data Catalog.
    PublishSummaryToCscc PreventionJobTriggerInspectJobActionPublishSummaryToCscc
    Publish the result summary of a DlpJob to the Cloud Security Command Center.
    PublishToStackdriver PreventionJobTriggerInspectJobActionPublishToStackdriver
    Enable Stackdriver metric dlp.googleapis.com/findingCount.
    SaveFindings PreventionJobTriggerInspectJobActionSaveFindings
    If set, the detailed findings will be persisted to the specified OutputStorageConfig. Only a single instance of this action can be specified. Compatible with: Inspect, Risk Structure is documented below.
    Deidentify PreventionJobTriggerInspectJobActionDeidentify
    Create a de-identified copy of the requested table or files. Structure is documented below.
    JobNotificationEmails PreventionJobTriggerInspectJobActionJobNotificationEmails
    Sends an email when the job completes. The email goes to IAM project owners and technical Essential Contacts.
    PubSub PreventionJobTriggerInspectJobActionPubSub
    Publish a message into a given Pub/Sub topic when the job completes. Structure is documented below.
    PublishFindingsToCloudDataCatalog PreventionJobTriggerInspectJobActionPublishFindingsToCloudDataCatalog
    Publish findings of a DlpJob to Data Catalog.
    PublishSummaryToCscc PreventionJobTriggerInspectJobActionPublishSummaryToCscc
    Publish the result summary of a DlpJob to the Cloud Security Command Center.
    PublishToStackdriver PreventionJobTriggerInspectJobActionPublishToStackdriver
    Enable Stackdriver metric dlp.googleapis.com/findingCount.
    SaveFindings PreventionJobTriggerInspectJobActionSaveFindings
    If set, the detailed findings will be persisted to the specified OutputStorageConfig. Only a single instance of this action can be specified. Compatible with: Inspect, Risk Structure is documented below.
    deidentify PreventionJobTriggerInspectJobActionDeidentify
    Create a de-identified copy of the requested table or files. Structure is documented below.
    jobNotificationEmails PreventionJobTriggerInspectJobActionJobNotificationEmails
    Sends an email when the job completes. The email goes to IAM project owners and technical Essential Contacts.
    pubSub PreventionJobTriggerInspectJobActionPubSub
    Publish a message into a given Pub/Sub topic when the job completes. Structure is documented below.
    publishFindingsToCloudDataCatalog PreventionJobTriggerInspectJobActionPublishFindingsToCloudDataCatalog
    Publish findings of a DlpJob to Data Catalog.
    publishSummaryToCscc PreventionJobTriggerInspectJobActionPublishSummaryToCscc
    Publish the result summary of a DlpJob to the Cloud Security Command Center.
    publishToStackdriver PreventionJobTriggerInspectJobActionPublishToStackdriver
    Enable Stackdriver metric dlp.googleapis.com/findingCount.
    saveFindings PreventionJobTriggerInspectJobActionSaveFindings
    If set, the detailed findings will be persisted to the specified OutputStorageConfig. Only a single instance of this action can be specified. Compatible with: Inspect, Risk Structure is documented below.
    deidentify PreventionJobTriggerInspectJobActionDeidentify
    Create a de-identified copy of the requested table or files. Structure is documented below.
    jobNotificationEmails PreventionJobTriggerInspectJobActionJobNotificationEmails
    Sends an email when the job completes. The email goes to IAM project owners and technical Essential Contacts.
    pubSub PreventionJobTriggerInspectJobActionPubSub
    Publish a message into a given Pub/Sub topic when the job completes. Structure is documented below.
    publishFindingsToCloudDataCatalog PreventionJobTriggerInspectJobActionPublishFindingsToCloudDataCatalog
    Publish findings of a DlpJob to Data Catalog.
    publishSummaryToCscc PreventionJobTriggerInspectJobActionPublishSummaryToCscc
    Publish the result summary of a DlpJob to the Cloud Security Command Center.
    publishToStackdriver PreventionJobTriggerInspectJobActionPublishToStackdriver
    Enable Stackdriver metric dlp.googleapis.com/findingCount.
    saveFindings PreventionJobTriggerInspectJobActionSaveFindings
    If set, the detailed findings will be persisted to the specified OutputStorageConfig. Only a single instance of this action can be specified. Compatible with: Inspect, Risk Structure is documented below.
    deidentify PreventionJobTriggerInspectJobActionDeidentify
    Create a de-identified copy of the requested table or files. Structure is documented below.
    job_notification_emails PreventionJobTriggerInspectJobActionJobNotificationEmails
    Sends an email when the job completes. The email goes to IAM project owners and technical Essential Contacts.
    pub_sub PreventionJobTriggerInspectJobActionPubSub
    Publish a message into a given Pub/Sub topic when the job completes. Structure is documented below.
    publish_findings_to_cloud_data_catalog PreventionJobTriggerInspectJobActionPublishFindingsToCloudDataCatalog
    Publish findings of a DlpJob to Data Catalog.
    publish_summary_to_cscc PreventionJobTriggerInspectJobActionPublishSummaryToCscc
    Publish the result summary of a DlpJob to the Cloud Security Command Center.
    publish_to_stackdriver PreventionJobTriggerInspectJobActionPublishToStackdriver
    Enable Stackdriver metric dlp.googleapis.com/findingCount.
    save_findings PreventionJobTriggerInspectJobActionSaveFindings
    If set, the detailed findings will be persisted to the specified OutputStorageConfig. Only a single instance of this action can be specified. Compatible with: Inspect, Risk Structure is documented below.
    deidentify Property Map
    Create a de-identified copy of the requested table or files. Structure is documented below.
    jobNotificationEmails Property Map
    Sends an email when the job completes. The email goes to IAM project owners and technical Essential Contacts.
    pubSub Property Map
    Publish a message into a given Pub/Sub topic when the job completes. Structure is documented below.
    publishFindingsToCloudDataCatalog Property Map
    Publish findings of a DlpJob to Data Catalog.
    publishSummaryToCscc Property Map
    Publish the result summary of a DlpJob to the Cloud Security Command Center.
    publishToStackdriver Property Map
    Enable Stackdriver metric dlp.googleapis.com/findingCount.
    saveFindings Property Map
    If set, the detailed findings will be persisted to the specified OutputStorageConfig. Only a single instance of this action can be specified. Compatible with: Inspect, Risk Structure is documented below.

    PreventionJobTriggerInspectJobActionDeidentify, PreventionJobTriggerInspectJobActionDeidentifyArgs

    CloudStorageOutput string
    User settable Cloud Storage bucket and folders to store de-identified files. This field must be set for cloud storage deidentification. The output Cloud Storage bucket must be different from the input bucket. De-identified files will overwrite files in the output path. Form of: gs://bucket/folder/ or gs://bucket
    FileTypesToTransforms List<string>
    List of user-specified file type groups to transform. If specified, only the files with these filetypes will be transformed. If empty, all supported files will be transformed. Supported types may be automatically added over time. If a file type is set in this field that isn't supported by the Deidentify action then the job will fail and will not be successfully created/started. Each value may be one of: IMAGE, TEXT_FILE, CSV, TSV.
    TransformationConfig PreventionJobTriggerInspectJobActionDeidentifyTransformationConfig
    User specified deidentify templates and configs for structured, unstructured, and image files. Structure is documented below.
    TransformationDetailsStorageConfig PreventionJobTriggerInspectJobActionDeidentifyTransformationDetailsStorageConfig
    Config for storing transformation details. Structure is documented below.
    CloudStorageOutput string
    User settable Cloud Storage bucket and folders to store de-identified files. This field must be set for cloud storage deidentification. The output Cloud Storage bucket must be different from the input bucket. De-identified files will overwrite files in the output path. Form of: gs://bucket/folder/ or gs://bucket
    FileTypesToTransforms []string
    List of user-specified file type groups to transform. If specified, only the files with these filetypes will be transformed. If empty, all supported files will be transformed. Supported types may be automatically added over time. If a file type is set in this field that isn't supported by the Deidentify action then the job will fail and will not be successfully created/started. Each value may be one of: IMAGE, TEXT_FILE, CSV, TSV.
    TransformationConfig PreventionJobTriggerInspectJobActionDeidentifyTransformationConfig
    User specified deidentify templates and configs for structured, unstructured, and image files. Structure is documented below.
    TransformationDetailsStorageConfig PreventionJobTriggerInspectJobActionDeidentifyTransformationDetailsStorageConfig
    Config for storing transformation details. Structure is documented below.
    cloudStorageOutput String
    User settable Cloud Storage bucket and folders to store de-identified files. This field must be set for cloud storage deidentification. The output Cloud Storage bucket must be different from the input bucket. De-identified files will overwrite files in the output path. Form of: gs://bucket/folder/ or gs://bucket
    fileTypesToTransforms List<String>
    List of user-specified file type groups to transform. If specified, only the files with these filetypes will be transformed. If empty, all supported files will be transformed. Supported types may be automatically added over time. If a file type is set in this field that isn't supported by the Deidentify action then the job will fail and will not be successfully created/started. Each value may be one of: IMAGE, TEXT_FILE, CSV, TSV.
    transformationConfig PreventionJobTriggerInspectJobActionDeidentifyTransformationConfig
    User specified deidentify templates and configs for structured, unstructured, and image files. Structure is documented below.
    transformationDetailsStorageConfig PreventionJobTriggerInspectJobActionDeidentifyTransformationDetailsStorageConfig
    Config for storing transformation details. Structure is documented below.
    cloudStorageOutput string
    User settable Cloud Storage bucket and folders to store de-identified files. This field must be set for cloud storage deidentification. The output Cloud Storage bucket must be different from the input bucket. De-identified files will overwrite files in the output path. Form of: gs://bucket/folder/ or gs://bucket
    fileTypesToTransforms string[]
    List of user-specified file type groups to transform. If specified, only the files with these filetypes will be transformed. If empty, all supported files will be transformed. Supported types may be automatically added over time. If a file type is set in this field that isn't supported by the Deidentify action then the job will fail and will not be successfully created/started. Each value may be one of: IMAGE, TEXT_FILE, CSV, TSV.
    transformationConfig PreventionJobTriggerInspectJobActionDeidentifyTransformationConfig
    User specified deidentify templates and configs for structured, unstructured, and image files. Structure is documented below.
    transformationDetailsStorageConfig PreventionJobTriggerInspectJobActionDeidentifyTransformationDetailsStorageConfig
    Config for storing transformation details. Structure is documented below.
    cloud_storage_output str
    User settable Cloud Storage bucket and folders to store de-identified files. This field must be set for cloud storage deidentification. The output Cloud Storage bucket must be different from the input bucket. De-identified files will overwrite files in the output path. Form of: gs://bucket/folder/ or gs://bucket
    file_types_to_transforms Sequence[str]
    List of user-specified file type groups to transform. If specified, only the files with these filetypes will be transformed. If empty, all supported files will be transformed. Supported types may be automatically added over time. If a file type is set in this field that isn't supported by the Deidentify action then the job will fail and will not be successfully created/started. Each value may be one of: IMAGE, TEXT_FILE, CSV, TSV.
    transformation_config PreventionJobTriggerInspectJobActionDeidentifyTransformationConfig
    User specified deidentify templates and configs for structured, unstructured, and image files. Structure is documented below.
    transformation_details_storage_config PreventionJobTriggerInspectJobActionDeidentifyTransformationDetailsStorageConfig
    Config for storing transformation details. Structure is documented below.
    cloudStorageOutput String
    User settable Cloud Storage bucket and folders to store de-identified files. This field must be set for cloud storage deidentification. The output Cloud Storage bucket must be different from the input bucket. De-identified files will overwrite files in the output path. Form of: gs://bucket/folder/ or gs://bucket
    fileTypesToTransforms List<String>
    List of user-specified file type groups to transform. If specified, only the files with these filetypes will be transformed. If empty, all supported files will be transformed. Supported types may be automatically added over time. If a file type is set in this field that isn't supported by the Deidentify action then the job will fail and will not be successfully created/started. Each value may be one of: IMAGE, TEXT_FILE, CSV, TSV.
    transformationConfig Property Map
    User specified deidentify templates and configs for structured, unstructured, and image files. Structure is documented below.
    transformationDetailsStorageConfig Property Map
    Config for storing transformation details. Structure is documented below.

    PreventionJobTriggerInspectJobActionDeidentifyTransformationConfig, PreventionJobTriggerInspectJobActionDeidentifyTransformationConfigArgs

    DeidentifyTemplate string
    If this template is specified, it will serve as the default de-identify template.
    ImageRedactTemplate string
    If this template is specified, it will serve as the de-identify template for images.
    StructuredDeidentifyTemplate string
    If this template is specified, it will serve as the de-identify template for structured content such as delimited files and tables.
    DeidentifyTemplate string
    If this template is specified, it will serve as the default de-identify template.
    ImageRedactTemplate string
    If this template is specified, it will serve as the de-identify template for images.
    StructuredDeidentifyTemplate string
    If this template is specified, it will serve as the de-identify template for structured content such as delimited files and tables.
    deidentifyTemplate String
    If this template is specified, it will serve as the default de-identify template.
    imageRedactTemplate String
    If this template is specified, it will serve as the de-identify template for images.
    structuredDeidentifyTemplate String
    If this template is specified, it will serve as the de-identify template for structured content such as delimited files and tables.
    deidentifyTemplate string
    If this template is specified, it will serve as the default de-identify template.
    imageRedactTemplate string
    If this template is specified, it will serve as the de-identify template for images.
    structuredDeidentifyTemplate string
    If this template is specified, it will serve as the de-identify template for structured content such as delimited files and tables.
    deidentify_template str
    If this template is specified, it will serve as the default de-identify template.
    image_redact_template str
    If this template is specified, it will serve as the de-identify template for images.
    structured_deidentify_template str
    If this template is specified, it will serve as the de-identify template for structured content such as delimited files and tables.
    deidentifyTemplate String
    If this template is specified, it will serve as the default de-identify template.
    imageRedactTemplate String
    If this template is specified, it will serve as the de-identify template for images.
    structuredDeidentifyTemplate String
    If this template is specified, it will serve as the de-identify template for structured content such as delimited files and tables.

    PreventionJobTriggerInspectJobActionDeidentifyTransformationDetailsStorageConfig, PreventionJobTriggerInspectJobActionDeidentifyTransformationDetailsStorageConfigArgs

    Table PreventionJobTriggerInspectJobActionDeidentifyTransformationDetailsStorageConfigTable
    The BigQuery table in which to store the output. Structure is documented below.
    Table PreventionJobTriggerInspectJobActionDeidentifyTransformationDetailsStorageConfigTable
    The BigQuery table in which to store the output. Structure is documented below.
    table PreventionJobTriggerInspectJobActionDeidentifyTransformationDetailsStorageConfigTable
    The BigQuery table in which to store the output. Structure is documented below.
    table PreventionJobTriggerInspectJobActionDeidentifyTransformationDetailsStorageConfigTable
    The BigQuery table in which to store the output. Structure is documented below.
    table PreventionJobTriggerInspectJobActionDeidentifyTransformationDetailsStorageConfigTable
    The BigQuery table in which to store the output. Structure is documented below.
    table Property Map
    The BigQuery table in which to store the output. Structure is documented below.

    PreventionJobTriggerInspectJobActionDeidentifyTransformationDetailsStorageConfigTable, PreventionJobTriggerInspectJobActionDeidentifyTransformationDetailsStorageConfigTableArgs

    DatasetId string
    The ID of the dataset containing this table.
    ProjectId string
    The ID of the project containing this table.
    TableId string
    The ID of the table. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters.
    DatasetId string
    The ID of the dataset containing this table.
    ProjectId string
    The ID of the project containing this table.
    TableId string
    The ID of the table. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters.
    datasetId String
    The ID of the dataset containing this table.
    projectId String
    The ID of the project containing this table.
    tableId String
    The ID of the table. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters.
    datasetId string
    The ID of the dataset containing this table.
    projectId string
    The ID of the project containing this table.
    tableId string
    The ID of the table. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters.
    dataset_id str
    The ID of the dataset containing this table.
    project_id str
    The ID of the project containing this table.
    table_id str
    The ID of the table. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters.
    datasetId String
    The ID of the dataset containing this table.
    projectId String
    The ID of the project containing this table.
    tableId String
    The ID of the table. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters.

    PreventionJobTriggerInspectJobActionPubSub, PreventionJobTriggerInspectJobActionPubSubArgs

    Topic string
    Cloud Pub/Sub topic to send notifications to.
    Topic string
    Cloud Pub/Sub topic to send notifications to.
    topic String
    Cloud Pub/Sub topic to send notifications to.
    topic string
    Cloud Pub/Sub topic to send notifications to.
    topic str
    Cloud Pub/Sub topic to send notifications to.
    topic String
    Cloud Pub/Sub topic to send notifications to.

    PreventionJobTriggerInspectJobActionSaveFindings, PreventionJobTriggerInspectJobActionSaveFindingsArgs

    OutputConfig PreventionJobTriggerInspectJobActionSaveFindingsOutputConfig
    Information on where to store output. Structure is documented below.
    OutputConfig PreventionJobTriggerInspectJobActionSaveFindingsOutputConfig
    Information on where to store output. Structure is documented below.
    outputConfig PreventionJobTriggerInspectJobActionSaveFindingsOutputConfig
    Information on where to store output. Structure is documented below.
    outputConfig PreventionJobTriggerInspectJobActionSaveFindingsOutputConfig
    Information on where to store output. Structure is documented below.
    output_config PreventionJobTriggerInspectJobActionSaveFindingsOutputConfig
    Information on where to store output. Structure is documented below.
    outputConfig Property Map
    Information on where to store output. Structure is documented below.

    PreventionJobTriggerInspectJobActionSaveFindingsOutputConfig, PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigArgs

    Table PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTable
    Information on the location of the target BigQuery Table. Structure is documented below.
    OutputSchema string
    Schema used for writing the findings for Inspect jobs. This field is only used for Inspect and must be unspecified for Risk jobs. Columns are derived from the Finding object. If appending to an existing table, any columns from the predefined schema that are missing will be added. No columns in the existing table will be deleted. If unspecified, then all available columns will be used for a new table or an (existing) table with no schema, and no changes will be made to an existing table that has a schema. Only for use with external storage. Possible values are: BASIC_COLUMNS, GCS_COLUMNS, DATASTORE_COLUMNS, BIG_QUERY_COLUMNS, ALL_COLUMNS.
    Table PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTable
    Information on the location of the target BigQuery Table. Structure is documented below.
    OutputSchema string
    Schema used for writing the findings for Inspect jobs. This field is only used for Inspect and must be unspecified for Risk jobs. Columns are derived from the Finding object. If appending to an existing table, any columns from the predefined schema that are missing will be added. No columns in the existing table will be deleted. If unspecified, then all available columns will be used for a new table or an (existing) table with no schema, and no changes will be made to an existing table that has a schema. Only for use with external storage. Possible values are: BASIC_COLUMNS, GCS_COLUMNS, DATASTORE_COLUMNS, BIG_QUERY_COLUMNS, ALL_COLUMNS.
    table PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTable
    Information on the location of the target BigQuery Table. Structure is documented below.
    outputSchema String
    Schema used for writing the findings for Inspect jobs. This field is only used for Inspect and must be unspecified for Risk jobs. Columns are derived from the Finding object. If appending to an existing table, any columns from the predefined schema that are missing will be added. No columns in the existing table will be deleted. If unspecified, then all available columns will be used for a new table or an (existing) table with no schema, and no changes will be made to an existing table that has a schema. Only for use with external storage. Possible values are: BASIC_COLUMNS, GCS_COLUMNS, DATASTORE_COLUMNS, BIG_QUERY_COLUMNS, ALL_COLUMNS.
    table PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTable
    Information on the location of the target BigQuery Table. Structure is documented below.
    outputSchema string
    Schema used for writing the findings for Inspect jobs. This field is only used for Inspect and must be unspecified for Risk jobs. Columns are derived from the Finding object. If appending to an existing table, any columns from the predefined schema that are missing will be added. No columns in the existing table will be deleted. If unspecified, then all available columns will be used for a new table or an (existing) table with no schema, and no changes will be made to an existing table that has a schema. Only for use with external storage. Possible values are: BASIC_COLUMNS, GCS_COLUMNS, DATASTORE_COLUMNS, BIG_QUERY_COLUMNS, ALL_COLUMNS.
    table PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTable
    Information on the location of the target BigQuery Table. Structure is documented below.
    output_schema str
    Schema used for writing the findings for Inspect jobs. This field is only used for Inspect and must be unspecified for Risk jobs. Columns are derived from the Finding object. If appending to an existing table, any columns from the predefined schema that are missing will be added. No columns in the existing table will be deleted. If unspecified, then all available columns will be used for a new table or an (existing) table with no schema, and no changes will be made to an existing table that has a schema. Only for use with external storage. Possible values are: BASIC_COLUMNS, GCS_COLUMNS, DATASTORE_COLUMNS, BIG_QUERY_COLUMNS, ALL_COLUMNS.
    table Property Map
    Information on the location of the target BigQuery Table. Structure is documented below.
    outputSchema String
    Schema used for writing the findings for Inspect jobs. This field is only used for Inspect and must be unspecified for Risk jobs. Columns are derived from the Finding object. If appending to an existing table, any columns from the predefined schema that are missing will be added. No columns in the existing table will be deleted. If unspecified, then all available columns will be used for a new table or an (existing) table with no schema, and no changes will be made to an existing table that has a schema. Only for use with external storage. Possible values are: BASIC_COLUMNS, GCS_COLUMNS, DATASTORE_COLUMNS, BIG_QUERY_COLUMNS, ALL_COLUMNS.

    PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTable, PreventionJobTriggerInspectJobActionSaveFindingsOutputConfigTableArgs

    DatasetId string
    The ID of the dataset containing this table.
    ProjectId string
    The ID of the project containing this table.
    TableId string
    The ID of the table. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters.
    DatasetId string
    The ID of the dataset containing this table.
    ProjectId string
    The ID of the project containing this table.
    TableId string
    The ID of the table. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters.
    datasetId String
    The ID of the dataset containing this table.
    projectId String
    The ID of the project containing this table.
    tableId String
    The ID of the table. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters.
    datasetId string
    The ID of the dataset containing this table.
    projectId string
    The ID of the project containing this table.
    tableId string
    The ID of the table. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters.
    dataset_id str
    The ID of the dataset containing this table.
    project_id str
    The ID of the project containing this table.
    table_id str
    The ID of the table. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters.
    datasetId String
    The ID of the dataset containing this table.
    projectId String
    The ID of the project containing this table.
    tableId String
    The ID of the table. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters.

    PreventionJobTriggerInspectJobInspectConfig, PreventionJobTriggerInspectJobInspectConfigArgs

    CustomInfoTypes List<PreventionJobTriggerInspectJobInspectConfigCustomInfoType>
    Custom info types to be used. See https://cloud.google.com/dlp/docs/creating-custom-infotypes to learn more. Structure is documented below.
    ExcludeInfoTypes bool
    When true, excludes type information of the findings.
    IncludeQuote bool
    When true, a contextual quote from the data that triggered a finding is included in the response.
    InfoTypes List<PreventionJobTriggerInspectJobInspectConfigInfoType>
    Restricts what infoTypes to look for. The values must correspond to InfoType values returned by infoTypes.list or listed at https://cloud.google.com/dlp/docs/infotypes-reference. When no InfoTypes or CustomInfoTypes are specified in a request, the system may automatically choose what detectors to run. By default this may be all types, but may change over time as detectors are updated. Structure is documented below.
    Limits PreventionJobTriggerInspectJobInspectConfigLimits
    Configuration to control the number of findings returned. Structure is documented below.
    MinLikelihood string
    Only returns findings equal or above this threshold. See https://cloud.google.com/dlp/docs/likelihood for more info. Default value is POSSIBLE. Possible values are: VERY_UNLIKELY, UNLIKELY, POSSIBLE, LIKELY, VERY_LIKELY.
    RuleSets List<PreventionJobTriggerInspectJobInspectConfigRuleSet>
    Set of rules to apply to the findings for this InspectConfig. Exclusion rules, contained in the set are executed in the end, other rules are executed in the order they are specified for each info type. Structure is documented below.
    CustomInfoTypes []PreventionJobTriggerInspectJobInspectConfigCustomInfoType
    Custom info types to be used. See https://cloud.google.com/dlp/docs/creating-custom-infotypes to learn more. Structure is documented below.
    ExcludeInfoTypes bool
    When true, excludes type information of the findings.
    IncludeQuote bool
    When true, a contextual quote from the data that triggered a finding is included in the response.
    InfoTypes []PreventionJobTriggerInspectJobInspectConfigInfoType
    Restricts what infoTypes to look for. The values must correspond to InfoType values returned by infoTypes.list or listed at https://cloud.google.com/dlp/docs/infotypes-reference. When no InfoTypes or CustomInfoTypes are specified in a request, the system may automatically choose what detectors to run. By default this may be all types, but may change over time as detectors are updated. Structure is documented below.
    Limits PreventionJobTriggerInspectJobInspectConfigLimits
    Configuration to control the number of findings returned. Structure is documented below.
    MinLikelihood string
    Only returns findings equal or above this threshold. See https://cloud.google.com/dlp/docs/likelihood for more info. Default value is POSSIBLE. Possible values are: VERY_UNLIKELY, UNLIKELY, POSSIBLE, LIKELY, VERY_LIKELY.
    RuleSets []PreventionJobTriggerInspectJobInspectConfigRuleSet
    Set of rules to apply to the findings for this InspectConfig. Exclusion rules, contained in the set are executed in the end, other rules are executed in the order they are specified for each info type. Structure is documented below.
    customInfoTypes List<PreventionJobTriggerInspectJobInspectConfigCustomInfoType>
    Custom info types to be used. See https://cloud.google.com/dlp/docs/creating-custom-infotypes to learn more. Structure is documented below.
    excludeInfoTypes Boolean
    When true, excludes type information of the findings.
    includeQuote Boolean
    When true, a contextual quote from the data that triggered a finding is included in the response.
    infoTypes List<PreventionJobTriggerInspectJobInspectConfigInfoType>
    Restricts what infoTypes to look for. The values must correspond to InfoType values returned by infoTypes.list or listed at https://cloud.google.com/dlp/docs/infotypes-reference. When no InfoTypes or CustomInfoTypes are specified in a request, the system may automatically choose what detectors to run. By default this may be all types, but may change over time as detectors are updated. Structure is documented below.
    limits PreventionJobTriggerInspectJobInspectConfigLimits
    Configuration to control the number of findings returned. Structure is documented below.
    minLikelihood String
    Only returns findings equal or above this threshold. See https://cloud.google.com/dlp/docs/likelihood for more info. Default value is POSSIBLE. Possible values are: VERY_UNLIKELY, UNLIKELY, POSSIBLE, LIKELY, VERY_LIKELY.
    ruleSets List<PreventionJobTriggerInspectJobInspectConfigRuleSet>
    Set of rules to apply to the findings for this InspectConfig. Exclusion rules, contained in the set are executed in the end, other rules are executed in the order they are specified for each info type. Structure is documented below.
    customInfoTypes PreventionJobTriggerInspectJobInspectConfigCustomInfoType[]
    Custom info types to be used. See https://cloud.google.com/dlp/docs/creating-custom-infotypes to learn more. Structure is documented below.
    excludeInfoTypes boolean
    When true, excludes type information of the findings.
    includeQuote boolean
    When true, a contextual quote from the data that triggered a finding is included in the response.
    infoTypes PreventionJobTriggerInspectJobInspectConfigInfoType[]
    Restricts what infoTypes to look for. The values must correspond to InfoType values returned by infoTypes.list or listed at https://cloud.google.com/dlp/docs/infotypes-reference. When no InfoTypes or CustomInfoTypes are specified in a request, the system may automatically choose what detectors to run. By default this may be all types, but may change over time as detectors are updated. Structure is documented below.
    limits PreventionJobTriggerInspectJobInspectConfigLimits
    Configuration to control the number of findings returned. Structure is documented below.
    minLikelihood string
    Only returns findings equal or above this threshold. See https://cloud.google.com/dlp/docs/likelihood for more info. Default value is POSSIBLE. Possible values are: VERY_UNLIKELY, UNLIKELY, POSSIBLE, LIKELY, VERY_LIKELY.
    ruleSets PreventionJobTriggerInspectJobInspectConfigRuleSet[]
    Set of rules to apply to the findings for this InspectConfig. Exclusion rules, contained in the set are executed in the end, other rules are executed in the order they are specified for each info type. Structure is documented below.
    custom_info_types Sequence[PreventionJobTriggerInspectJobInspectConfigCustomInfoType]
    Custom info types to be used. See https://cloud.google.com/dlp/docs/creating-custom-infotypes to learn more. Structure is documented below.
    exclude_info_types bool
    When true, excludes type information of the findings.
    include_quote bool
    When true, a contextual quote from the data that triggered a finding is included in the response.
    info_types Sequence[PreventionJobTriggerInspectJobInspectConfigInfoType]
    Restricts what infoTypes to look for. The values must correspond to InfoType values returned by infoTypes.list or listed at https://cloud.google.com/dlp/docs/infotypes-reference. When no InfoTypes or CustomInfoTypes are specified in a request, the system may automatically choose what detectors to run. By default this may be all types, but may change over time as detectors are updated. Structure is documented below.
    limits PreventionJobTriggerInspectJobInspectConfigLimits
    Configuration to control the number of findings returned. Structure is documented below.
    min_likelihood str
    Only returns findings equal or above this threshold. See https://cloud.google.com/dlp/docs/likelihood for more info. Default value is POSSIBLE. Possible values are: VERY_UNLIKELY, UNLIKELY, POSSIBLE, LIKELY, VERY_LIKELY.
    rule_sets Sequence[PreventionJobTriggerInspectJobInspectConfigRuleSet]
    Set of rules to apply to the findings for this InspectConfig. Exclusion rules, contained in the set are executed in the end, other rules are executed in the order they are specified for each info type. Structure is documented below.
    customInfoTypes List<Property Map>
    Custom info types to be used. See https://cloud.google.com/dlp/docs/creating-custom-infotypes to learn more. Structure is documented below.
    excludeInfoTypes Boolean
    When true, excludes type information of the findings.
    includeQuote Boolean
    When true, a contextual quote from the data that triggered a finding is included in the response.
    infoTypes List<Property Map>
    Restricts what infoTypes to look for. The values must correspond to InfoType values returned by infoTypes.list or listed at https://cloud.google.com/dlp/docs/infotypes-reference. When no InfoTypes or CustomInfoTypes are specified in a request, the system may automatically choose what detectors to run. By default this may be all types, but may change over time as detectors are updated. Structure is documented below.
    limits Property Map
    Configuration to control the number of findings returned. Structure is documented below.
    minLikelihood String
    Only returns findings equal or above this threshold. See https://cloud.google.com/dlp/docs/likelihood for more info. Default value is POSSIBLE. Possible values are: VERY_UNLIKELY, UNLIKELY, POSSIBLE, LIKELY, VERY_LIKELY.
    ruleSets List<Property Map>
    Set of rules to apply to the findings for this InspectConfig. Exclusion rules, contained in the set are executed in the end, other rules are executed in the order they are specified for each info type. Structure is documented below.

    PreventionJobTriggerInspectJobInspectConfigCustomInfoType, PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeArgs

    InfoType PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeInfoType
    CustomInfoType can either be a new infoType, or an extension of a built-in infoType, when the name matches one of the existing infoTypes and that infoType is specified in the info_types field. Specifying the latter adds findings to the ones detected by the system. If a built-in info type is not specified in the info_types list then the name is treated as a custom info type. Structure is documented below.
    Dictionary PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeDictionary
    Dictionary which defines the rule. Structure is documented below.
    ExclusionType string
    If set to EXCLUSION_TYPE_EXCLUDE this infoType will not cause a finding to be returned. It still can be used for rules matching. Possible values are: EXCLUSION_TYPE_EXCLUDE.
    Likelihood string
    Likelihood to return for this CustomInfoType. This base value can be altered by a detection rule if the finding meets the criteria specified by the rule. Default value is VERY_LIKELY. Possible values are: VERY_UNLIKELY, UNLIKELY, POSSIBLE, LIKELY, VERY_LIKELY.
    Regex PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeRegex
    Regular expression which defines the rule. Structure is documented below.
    SensitivityScore PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeSensitivityScore
    Optional custom sensitivity for this InfoType. This only applies to data profiling. Structure is documented below.
    StoredType PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeStoredType
    A reference to a StoredInfoType to use with scanning. Structure is documented below.
    SurrogateType PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeSurrogateType
    Message for detecting output from deidentification transformations that support reversing.
    InfoType PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeInfoType
    CustomInfoType can either be a new infoType, or an extension of a built-in infoType, when the name matches one of the existing infoTypes and that infoType is specified in the info_types field. Specifying the latter adds findings to the ones detected by the system. If a built-in info type is not specified in the info_types list then the name is treated as a custom info type. Structure is documented below.
    Dictionary PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeDictionary
    Dictionary which defines the rule. Structure is documented below.
    ExclusionType string
    If set to EXCLUSION_TYPE_EXCLUDE this infoType will not cause a finding to be returned. It still can be used for rules matching. Possible values are: EXCLUSION_TYPE_EXCLUDE.
    Likelihood string
    Likelihood to return for this CustomInfoType. This base value can be altered by a detection rule if the finding meets the criteria specified by the rule. Default value is VERY_LIKELY. Possible values are: VERY_UNLIKELY, UNLIKELY, POSSIBLE, LIKELY, VERY_LIKELY.
    Regex PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeRegex
    Regular expression which defines the rule. Structure is documented below.
    SensitivityScore PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeSensitivityScore
    Optional custom sensitivity for this InfoType. This only applies to data profiling. Structure is documented below.
    StoredType PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeStoredType
    A reference to a StoredInfoType to use with scanning. Structure is documented below.
    SurrogateType PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeSurrogateType
    Message for detecting output from deidentification transformations that support reversing.
    infoType PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeInfoType
    CustomInfoType can either be a new infoType, or an extension of a built-in infoType, when the name matches one of the existing infoTypes and that infoType is specified in the info_types field. Specifying the latter adds findings to the ones detected by the system. If a built-in info type is not specified in the info_types list then the name is treated as a custom info type. Structure is documented below.
    dictionary PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeDictionary
    Dictionary which defines the rule. Structure is documented below.
    exclusionType String
    If set to EXCLUSION_TYPE_EXCLUDE this infoType will not cause a finding to be returned. It still can be used for rules matching. Possible values are: EXCLUSION_TYPE_EXCLUDE.
    likelihood String
    Likelihood to return for this CustomInfoType. This base value can be altered by a detection rule if the finding meets the criteria specified by the rule. Default value is VERY_LIKELY. Possible values are: VERY_UNLIKELY, UNLIKELY, POSSIBLE, LIKELY, VERY_LIKELY.
    regex PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeRegex
    Regular expression which defines the rule. Structure is documented below.
    sensitivityScore PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeSensitivityScore
    Optional custom sensitivity for this InfoType. This only applies to data profiling. Structure is documented below.
    storedType PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeStoredType
    A reference to a StoredInfoType to use with scanning. Structure is documented below.
    surrogateType PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeSurrogateType
    Message for detecting output from deidentification transformations that support reversing.
    infoType PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeInfoType
    CustomInfoType can either be a new infoType, or an extension of a built-in infoType, when the name matches one of the existing infoTypes and that infoType is specified in the info_types field. Specifying the latter adds findings to the ones detected by the system. If a built-in info type is not specified in the info_types list, then the name is treated as a custom info type. Structure is documented below.
    dictionary PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeDictionary
    Dictionary which defines the rule. Structure is documented below.
    exclusionType string
    If set to EXCLUSION_TYPE_EXCLUDE this infoType will not cause a finding to be returned. It still can be used for rules matching. Possible values are: EXCLUSION_TYPE_EXCLUDE.
    likelihood string
    Likelihood to return for this CustomInfoType. This base value can be altered by a detection rule if the finding meets the criteria specified by the rule. Default value is VERY_LIKELY. Possible values are: VERY_UNLIKELY, UNLIKELY, POSSIBLE, LIKELY, VERY_LIKELY.
    regex PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeRegex
    Regular expression which defines the rule. Structure is documented below.
    sensitivityScore PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeSensitivityScore
    Optional custom sensitivity for this InfoType. This only applies to data profiling. Structure is documented below.
    storedType PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeStoredType
    A reference to a StoredInfoType to use with scanning. Structure is documented below.
    surrogateType PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeSurrogateType
    Message for detecting output from deidentification transformations that support reversing.
    info_type PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeInfoType
    CustomInfoType can either be a new infoType, or an extension of a built-in infoType, when the name matches one of the existing infoTypes and that infoType is specified in the info_types field. Specifying the latter adds findings to the ones detected by the system. If a built-in info type is not specified in the info_types list, then the name is treated as a custom info type. Structure is documented below.
    dictionary PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeDictionary
    Dictionary which defines the rule. Structure is documented below.
    exclusion_type str
    If set to EXCLUSION_TYPE_EXCLUDE this infoType will not cause a finding to be returned. It still can be used for rules matching. Possible values are: EXCLUSION_TYPE_EXCLUDE.
    likelihood str
    Likelihood to return for this CustomInfoType. This base value can be altered by a detection rule if the finding meets the criteria specified by the rule. Default value is VERY_LIKELY. Possible values are: VERY_UNLIKELY, UNLIKELY, POSSIBLE, LIKELY, VERY_LIKELY.
    regex PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeRegex
    Regular expression which defines the rule. Structure is documented below.
    sensitivity_score PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeSensitivityScore
    Optional custom sensitivity for this InfoType. This only applies to data profiling. Structure is documented below.
    stored_type PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeStoredType
    A reference to a StoredInfoType to use with scanning. Structure is documented below.
    surrogate_type PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeSurrogateType
    Message for detecting output from deidentification transformations that support reversing.
    infoType Property Map
    CustomInfoType can either be a new infoType, or an extension of a built-in infoType, when the name matches one of the existing infoTypes and that infoType is specified in the info_types field. Specifying the latter adds findings to the ones detected by the system. If a built-in info type is not specified in the info_types list, then the name is treated as a custom info type. Structure is documented below.
    dictionary Property Map
    Dictionary which defines the rule. Structure is documented below.
    exclusionType String
    If set to EXCLUSION_TYPE_EXCLUDE this infoType will not cause a finding to be returned. It still can be used for rules matching. Possible values are: EXCLUSION_TYPE_EXCLUDE.
    likelihood String
    Likelihood to return for this CustomInfoType. This base value can be altered by a detection rule if the finding meets the criteria specified by the rule. Default value is VERY_LIKELY. Possible values are: VERY_UNLIKELY, UNLIKELY, POSSIBLE, LIKELY, VERY_LIKELY.
    regex Property Map
    Regular expression which defines the rule. Structure is documented below.
    sensitivityScore Property Map
    Optional custom sensitivity for this InfoType. This only applies to data profiling. Structure is documented below.
    storedType Property Map
    A reference to a StoredInfoType to use with scanning. Structure is documented below.
    surrogateType Property Map
    Message for detecting output from deidentification transformations that support reversing.

    PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeDictionary, PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeDictionaryArgs

    CloudStoragePath PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeDictionaryCloudStoragePath
    Newline-delimited file of words in Cloud Storage. Only a single file is accepted. Structure is documented below.
    WordList PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeDictionaryWordList
    List of words or phrases to search for. Structure is documented below.
    CloudStoragePath PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeDictionaryCloudStoragePath
    Newline-delimited file of words in Cloud Storage. Only a single file is accepted. Structure is documented below.
    WordList PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeDictionaryWordList
    List of words or phrases to search for. Structure is documented below.
    cloudStoragePath PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeDictionaryCloudStoragePath
    Newline-delimited file of words in Cloud Storage. Only a single file is accepted. Structure is documented below.
    wordList PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeDictionaryWordList
    List of words or phrases to search for. Structure is documented below.
    cloudStoragePath PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeDictionaryCloudStoragePath
    Newline-delimited file of words in Cloud Storage. Only a single file is accepted. Structure is documented below.
    wordList PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeDictionaryWordList
    List of words or phrases to search for. Structure is documented below.
    cloud_storage_path PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeDictionaryCloudStoragePath
    Newline-delimited file of words in Cloud Storage. Only a single file is accepted. Structure is documented below.
    word_list PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeDictionaryWordList
    List of words or phrases to search for. Structure is documented below.
    cloudStoragePath Property Map
    Newline-delimited file of words in Cloud Storage. Only a single file is accepted. Structure is documented below.
    wordList Property Map
    List of words or phrases to search for. Structure is documented below.

    PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeDictionaryCloudStoragePath, PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeDictionaryCloudStoragePathArgs

    Path string
    A URL representing a file or path (no wildcards) in Cloud Storage. Example: gs://[BUCKET_NAME]/dictionary.txt
    Path string
    A URL representing a file or path (no wildcards) in Cloud Storage. Example: gs://[BUCKET_NAME]/dictionary.txt
    path String
    A URL representing a file or path (no wildcards) in Cloud Storage. Example: gs://[BUCKET_NAME]/dictionary.txt
    path string
    A URL representing a file or path (no wildcards) in Cloud Storage. Example: gs://[BUCKET_NAME]/dictionary.txt
    path str
    A URL representing a file or path (no wildcards) in Cloud Storage. Example: gs://[BUCKET_NAME]/dictionary.txt
    path String
    A URL representing a file or path (no wildcards) in Cloud Storage. Example: gs://[BUCKET_NAME]/dictionary.txt

    PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeDictionaryWordList, PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeDictionaryWordListArgs

    Words List<string>
    Words or phrases defining the dictionary. The dictionary must contain at least one phrase and every phrase must contain at least 2 characters that are letters or digits.
    Words []string
    Words or phrases defining the dictionary. The dictionary must contain at least one phrase and every phrase must contain at least 2 characters that are letters or digits.
    words List<String>
    Words or phrases defining the dictionary. The dictionary must contain at least one phrase and every phrase must contain at least 2 characters that are letters or digits.
    words string[]
    Words or phrases defining the dictionary. The dictionary must contain at least one phrase and every phrase must contain at least 2 characters that are letters or digits.
    words Sequence[str]
    Words or phrases defining the dictionary. The dictionary must contain at least one phrase and every phrase must contain at least 2 characters that are letters or digits.
    words List<String>
    Words or phrases defining the dictionary. The dictionary must contain at least one phrase and every phrase must contain at least 2 characters that are letters or digits.

    PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeInfoType, PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeInfoTypeArgs

    Name string
    Name of the information type. Either a name of your choosing when creating a CustomInfoType, or one of the names listed at https://cloud.google.com/dlp/docs/infotypes-reference when specifying a built-in type.
    SensitivityScore PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeInfoTypeSensitivityScore
    Optional custom sensitivity for this InfoType. This only applies to data profiling. Structure is documented below.
    Version string
    Version of the information type to use. By default, the version is set to stable.
    Name string
    Name of the information type. Either a name of your choosing when creating a CustomInfoType, or one of the names listed at https://cloud.google.com/dlp/docs/infotypes-reference when specifying a built-in type.
    SensitivityScore PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeInfoTypeSensitivityScore
    Optional custom sensitivity for this InfoType. This only applies to data profiling. Structure is documented below.
    Version string
    Version of the information type to use. By default, the version is set to stable.
    name String
    Name of the information type. Either a name of your choosing when creating a CustomInfoType, or one of the names listed at https://cloud.google.com/dlp/docs/infotypes-reference when specifying a built-in type.
    sensitivityScore PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeInfoTypeSensitivityScore
    Optional custom sensitivity for this InfoType. This only applies to data profiling. Structure is documented below.
    version String
    Version of the information type to use. By default, the version is set to stable.
    name string
    Name of the information type. Either a name of your choosing when creating a CustomInfoType, or one of the names listed at https://cloud.google.com/dlp/docs/infotypes-reference when specifying a built-in type.
    sensitivityScore PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeInfoTypeSensitivityScore
    Optional custom sensitivity for this InfoType. This only applies to data profiling. Structure is documented below.
    version string
    Version of the information type to use. By default, the version is set to stable.
    name str
    Name of the information type. Either a name of your choosing when creating a CustomInfoType, or one of the names listed at https://cloud.google.com/dlp/docs/infotypes-reference when specifying a built-in type.
    sensitivity_score PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeInfoTypeSensitivityScore
    Optional custom sensitivity for this InfoType. This only applies to data profiling. Structure is documented below.
    version str
    Version of the information type to use. By default, the version is set to stable.
    name String
    Name of the information type. Either a name of your choosing when creating a CustomInfoType, or one of the names listed at https://cloud.google.com/dlp/docs/infotypes-reference when specifying a built-in type.
    sensitivityScore Property Map
    Optional custom sensitivity for this InfoType. This only applies to data profiling. Structure is documented below.
    version String
    Version of the information type to use. By default, the version is set to stable.

    PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeInfoTypeSensitivityScore, PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeInfoTypeSensitivityScoreArgs

    Score string
    The sensitivity score applied to the resource. Possible values are: SENSITIVITY_LOW, SENSITIVITY_MODERATE, SENSITIVITY_HIGH.
    Score string
    The sensitivity score applied to the resource. Possible values are: SENSITIVITY_LOW, SENSITIVITY_MODERATE, SENSITIVITY_HIGH.
    score String
    The sensitivity score applied to the resource. Possible values are: SENSITIVITY_LOW, SENSITIVITY_MODERATE, SENSITIVITY_HIGH.
    score string
    The sensitivity score applied to the resource. Possible values are: SENSITIVITY_LOW, SENSITIVITY_MODERATE, SENSITIVITY_HIGH.
    score str
    The sensitivity score applied to the resource. Possible values are: SENSITIVITY_LOW, SENSITIVITY_MODERATE, SENSITIVITY_HIGH.
    score String
    The sensitivity score applied to the resource. Possible values are: SENSITIVITY_LOW, SENSITIVITY_MODERATE, SENSITIVITY_HIGH.

    PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeRegex, PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeRegexArgs

    Pattern string
    Pattern defining the regular expression. Its syntax (https://github.com/google/re2/wiki/Syntax) can be found under the google/re2 repository on GitHub.
    GroupIndexes List<int>
    The indexes of the submatches to extract as findings. When not specified, the entire match is returned. No more than 3 may be included.
    Pattern string
    Pattern defining the regular expression. Its syntax (https://github.com/google/re2/wiki/Syntax) can be found under the google/re2 repository on GitHub.
    GroupIndexes []int
    The indexes of the submatches to extract as findings. When not specified, the entire match is returned. No more than 3 may be included.
    pattern String
    Pattern defining the regular expression. Its syntax (https://github.com/google/re2/wiki/Syntax) can be found under the google/re2 repository on GitHub.
    groupIndexes List<Integer>
    The indexes of the submatches to extract as findings. When not specified, the entire match is returned. No more than 3 may be included.
    pattern string
    Pattern defining the regular expression. Its syntax (https://github.com/google/re2/wiki/Syntax) can be found under the google/re2 repository on GitHub.
    groupIndexes number[]
    The indexes of the submatches to extract as findings. When not specified, the entire match is returned. No more than 3 may be included.
    pattern str
    Pattern defining the regular expression. Its syntax (https://github.com/google/re2/wiki/Syntax) can be found under the google/re2 repository on GitHub.
    group_indexes Sequence[int]
    The indexes of the submatches to extract as findings. When not specified, the entire match is returned. No more than 3 may be included.
    pattern String
    Pattern defining the regular expression. Its syntax (https://github.com/google/re2/wiki/Syntax) can be found under the google/re2 repository on GitHub.
    groupIndexes List<Number>
    The indexes of the submatches to extract as findings. When not specified, the entire match is returned. No more than 3 may be included.

    PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeSensitivityScore, PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeSensitivityScoreArgs

    Score string
    The sensitivity score applied to the resource. Possible values are: SENSITIVITY_LOW, SENSITIVITY_MODERATE, SENSITIVITY_HIGH.
    Score string
    The sensitivity score applied to the resource. Possible values are: SENSITIVITY_LOW, SENSITIVITY_MODERATE, SENSITIVITY_HIGH.
    score String
    The sensitivity score applied to the resource. Possible values are: SENSITIVITY_LOW, SENSITIVITY_MODERATE, SENSITIVITY_HIGH.
    score string
    The sensitivity score applied to the resource. Possible values are: SENSITIVITY_LOW, SENSITIVITY_MODERATE, SENSITIVITY_HIGH.
    score str
    The sensitivity score applied to the resource. Possible values are: SENSITIVITY_LOW, SENSITIVITY_MODERATE, SENSITIVITY_HIGH.
    score String
    The sensitivity score applied to the resource. Possible values are: SENSITIVITY_LOW, SENSITIVITY_MODERATE, SENSITIVITY_HIGH.

    PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeStoredType, PreventionJobTriggerInspectJobInspectConfigCustomInfoTypeStoredTypeArgs

    Name string
    Resource name of the requested StoredInfoType, for example organizations/433245324/storedInfoTypes/432452342 or projects/project-id/storedInfoTypes/432452342.
    CreateTime string
    (Output) The creation timestamp of a storedInfoType. Set by the server.
    Name string
    Resource name of the requested StoredInfoType, for example organizations/433245324/storedInfoTypes/432452342 or projects/project-id/storedInfoTypes/432452342.
    CreateTime string
    (Output) The creation timestamp of a storedInfoType. Set by the server.
    name String
    Resource name of the requested StoredInfoType, for example organizations/433245324/storedInfoTypes/432452342 or projects/project-id/storedInfoTypes/432452342.
    createTime String
    (Output) The creation timestamp of a storedInfoType. Set by the server.
    name string
    Resource name of the requested StoredInfoType, for example organizations/433245324/storedInfoTypes/432452342 or projects/project-id/storedInfoTypes/432452342.
    createTime string
    (Output) The creation timestamp of a storedInfoType. Set by the server.
    name str
    Resource name of the requested StoredInfoType, for example organizations/433245324/storedInfoTypes/432452342 or projects/project-id/storedInfoTypes/432452342.
    create_time str
    (Output) The creation timestamp of a storedInfoType. Set by the server.
    name String
    Resource name of the requested StoredInfoType, for example organizations/433245324/storedInfoTypes/432452342 or projects/project-id/storedInfoTypes/432452342.
    createTime String
    (Output) The creation timestamp of a storedInfoType. Set by the server.

    PreventionJobTriggerInspectJobInspectConfigInfoType, PreventionJobTriggerInspectJobInspectConfigInfoTypeArgs

    Name string
    Name of the information type. Either a name of your choosing when creating a CustomInfoType, or one of the names listed at https://cloud.google.com/dlp/docs/infotypes-reference when specifying a built-in type.
    SensitivityScore PreventionJobTriggerInspectJobInspectConfigInfoTypeSensitivityScore
    Optional custom sensitivity for this InfoType. This only applies to data profiling. Structure is documented below.
    Version string
    Version of the information type to use. By default, the version is set to stable.
    Name string
    Name of the information type. Either a name of your choosing when creating a CustomInfoType, or one of the names listed at https://cloud.google.com/dlp/docs/infotypes-reference when specifying a built-in type.
    SensitivityScore PreventionJobTriggerInspectJobInspectConfigInfoTypeSensitivityScore
    Optional custom sensitivity for this InfoType. This only applies to data profiling. Structure is documented below.
    Version string
    Version of the information type to use. By default, the version is set to stable.
    name String
    Name of the information type. Either a name of your choosing when creating a CustomInfoType, or one of the names listed at https://cloud.google.com/dlp/docs/infotypes-reference when specifying a built-in type.
    sensitivityScore PreventionJobTriggerInspectJobInspectConfigInfoTypeSensitivityScore
    Optional custom sensitivity for this InfoType. This only applies to data profiling. Structure is documented below.
    version String
    Version of the information type to use. By default, the version is set to stable.
    name string
    Name of the information type. Either a name of your choosing when creating a CustomInfoType, or one of the names listed at https://cloud.google.com/dlp/docs/infotypes-reference when specifying a built-in type.
    sensitivityScore PreventionJobTriggerInspectJobInspectConfigInfoTypeSensitivityScore
    Optional custom sensitivity for this InfoType. This only applies to data profiling. Structure is documented below.
    version string
    Version of the information type to use. By default, the version is set to stable.
    name str
    Name of the information type. Either a name of your choosing when creating a CustomInfoType, or one of the names listed at https://cloud.google.com/dlp/docs/infotypes-reference when specifying a built-in type.
    sensitivity_score PreventionJobTriggerInspectJobInspectConfigInfoTypeSensitivityScore
    Optional custom sensitivity for this InfoType. This only applies to data profiling. Structure is documented below.
    version str
    Version of the information type to use. By default, the version is set to stable.
    name String
    Name of the information type. Either a name of your choosing when creating a CustomInfoType, or one of the names listed at https://cloud.google.com/dlp/docs/infotypes-reference when specifying a built-in type.
    sensitivityScore Property Map
    Optional custom sensitivity for this InfoType. This only applies to data profiling. Structure is documented below.
    version String
    Version of the information type to use. By default, the version is set to stable.

    PreventionJobTriggerInspectJobInspectConfigInfoTypeSensitivityScore, PreventionJobTriggerInspectJobInspectConfigInfoTypeSensitivityScoreArgs

    Score string
    The sensitivity score applied to the resource. Possible values are: SENSITIVITY_LOW, SENSITIVITY_MODERATE, SENSITIVITY_HIGH.
    Score string
    The sensitivity score applied to the resource. Possible values are: SENSITIVITY_LOW, SENSITIVITY_MODERATE, SENSITIVITY_HIGH.
    score String
    The sensitivity score applied to the resource. Possible values are: SENSITIVITY_LOW, SENSITIVITY_MODERATE, SENSITIVITY_HIGH.
    score string
    The sensitivity score applied to the resource. Possible values are: SENSITIVITY_LOW, SENSITIVITY_MODERATE, SENSITIVITY_HIGH.
    score str
    The sensitivity score applied to the resource. Possible values are: SENSITIVITY_LOW, SENSITIVITY_MODERATE, SENSITIVITY_HIGH.
    score String
    The sensitivity score applied to the resource. Possible values are: SENSITIVITY_LOW, SENSITIVITY_MODERATE, SENSITIVITY_HIGH.

    PreventionJobTriggerInspectJobInspectConfigLimits, PreventionJobTriggerInspectJobInspectConfigLimitsArgs

    MaxFindingsPerInfoTypes List<PreventionJobTriggerInspectJobInspectConfigLimitsMaxFindingsPerInfoType>
    Configuration of findings limit given for specified infoTypes. Structure is documented below.
    MaxFindingsPerItem int
    Max number of findings that will be returned for each item scanned. The maximum returned is 2000.
    MaxFindingsPerRequest int
    Max number of findings that will be returned per request/job. The maximum returned is 2000.
    MaxFindingsPerInfoTypes []PreventionJobTriggerInspectJobInspectConfigLimitsMaxFindingsPerInfoType
    Configuration of findings limit given for specified infoTypes. Structure is documented below.
    MaxFindingsPerItem int
    Max number of findings that will be returned for each item scanned. The maximum returned is 2000.
    MaxFindingsPerRequest int
    Max number of findings that will be returned per request/job. The maximum returned is 2000.
    maxFindingsPerInfoTypes List<PreventionJobTriggerInspectJobInspectConfigLimitsMaxFindingsPerInfoType>
    Configuration of findings limit given for specified infoTypes. Structure is documented below.
    maxFindingsPerItem Integer
    Max number of findings that will be returned for each item scanned. The maximum returned is 2000.
    maxFindingsPerRequest Integer
    Max number of findings that will be returned per request/job. The maximum returned is 2000.
    maxFindingsPerInfoTypes PreventionJobTriggerInspectJobInspectConfigLimitsMaxFindingsPerInfoType[]
    Configuration of findings limit given for specified infoTypes. Structure is documented below.
    maxFindingsPerItem number
    Max number of findings that will be returned for each item scanned. The maximum returned is 2000.
    maxFindingsPerRequest number
    Max number of findings that will be returned per request/job. The maximum returned is 2000.
    max_findings_per_info_types Sequence[PreventionJobTriggerInspectJobInspectConfigLimitsMaxFindingsPerInfoType]
    Configuration of findings limit given for specified infoTypes. Structure is documented below.
    max_findings_per_item int
    Max number of findings that will be returned for each item scanned. The maximum returned is 2000.
    max_findings_per_request int
    Max number of findings that will be returned per request/job. The maximum returned is 2000.
    maxFindingsPerInfoTypes List<Property Map>
    Configuration of findings limit given for specified infoTypes. Structure is documented below.
    maxFindingsPerItem Number
    Max number of findings that will be returned for each item scanned. The maximum returned is 2000.
    maxFindingsPerRequest Number
    Max number of findings that will be returned per request/job. The maximum returned is 2000.

    PreventionJobTriggerInspectJobInspectConfigLimitsMaxFindingsPerInfoType, PreventionJobTriggerInspectJobInspectConfigLimitsMaxFindingsPerInfoTypeArgs

    InfoType PreventionJobTriggerInspectJobInspectConfigLimitsMaxFindingsPerInfoTypeInfoType
    Type of information the findings limit applies to. Only one limit per infoType should be provided. If InfoTypeLimit does not have an infoType, the DLP API applies the limit against all infoTypes that are found but not specified in another InfoTypeLimit. Structure is documented below.
    MaxFindings int
    Max findings limit for the given infoType.
    InfoType PreventionJobTriggerInspectJobInspectConfigLimitsMaxFindingsPerInfoTypeInfoType
    Type of information the findings limit applies to. Only one limit per infoType should be provided. If InfoTypeLimit does not have an infoType, the DLP API applies the limit against all infoTypes that are found but not specified in another InfoTypeLimit. Structure is documented below.
    MaxFindings int
    Max findings limit for the given infoType.
    infoType PreventionJobTriggerInspectJobInspectConfigLimitsMaxFindingsPerInfoTypeInfoType
    Type of information the findings limit applies to. Only one limit per infoType should be provided. If InfoTypeLimit does not have an infoType, the DLP API applies the limit against all infoTypes that are found but not specified in another InfoTypeLimit. Structure is documented below.
    maxFindings Integer
    Max findings limit for the given infoType.
    infoType PreventionJobTriggerInspectJobInspectConfigLimitsMaxFindingsPerInfoTypeInfoType
    Type of information the findings limit applies to. Only one limit per infoType should be provided. If InfoTypeLimit does not have an infoType, the DLP API applies the limit against all infoTypes that are found but not specified in another InfoTypeLimit. Structure is documented below.
    maxFindings number
    Max findings limit for the given infoType.
    info_type PreventionJobTriggerInspectJobInspectConfigLimitsMaxFindingsPerInfoTypeInfoType
    Type of information the findings limit applies to. Only one limit per infoType should be provided. If InfoTypeLimit does not have an infoType, the DLP API applies the limit against all infoTypes that are found but not specified in another InfoTypeLimit. Structure is documented below.
    max_findings int
    Max findings limit for the given infoType.
    infoType Property Map
    Type of information the findings limit applies to. Only one limit per infoType should be provided. If InfoTypeLimit does not have an infoType, the DLP API applies the limit against all infoTypes that are found but not specified in another InfoTypeLimit. Structure is documented below.
    maxFindings Number
    Max findings limit for the given infoType.

    PreventionJobTriggerInspectJobInspectConfigLimitsMaxFindingsPerInfoTypeInfoType, PreventionJobTriggerInspectJobInspectConfigLimitsMaxFindingsPerInfoTypeInfoTypeArgs

    Name string
    Name of the information type. Either a name of your choosing when creating a CustomInfoType, or one of the names listed at https://cloud.google.com/dlp/docs/infotypes-reference when specifying a built-in type.
    SensitivityScore PreventionJobTriggerInspectJobInspectConfigLimitsMaxFindingsPerInfoTypeInfoTypeSensitivityScore
    Optional custom sensitivity for this InfoType. This only applies to data profiling. Structure is documented below.
    Version string
    Version of the information type to use. By default, the version is set to stable.
    Name string
    Name of the information type. Either a name of your choosing when creating a CustomInfoType, or one of the names listed at https://cloud.google.com/dlp/docs/infotypes-reference when specifying a built-in type.
    SensitivityScore PreventionJobTriggerInspectJobInspectConfigLimitsMaxFindingsPerInfoTypeInfoTypeSensitivityScore
    Optional custom sensitivity for this InfoType. This only applies to data profiling. Structure is documented below.
    Version string
    Version of the information type to use. By default, the version is set to stable.
    name String
    Name of the information type. Either a name of your choosing when creating a CustomInfoType, or one of the names listed at https://cloud.google.com/dlp/docs/infotypes-reference when specifying a built-in type.
    sensitivityScore PreventionJobTriggerInspectJobInspectConfigLimitsMaxFindingsPerInfoTypeInfoTypeSensitivityScore
    Optional custom sensitivity for this InfoType. This only applies to data profiling. Structure is documented below.
    version String
    Version of the information type to use. By default, the version is set to stable.
    name string
    Name of the information type. Either a name of your choosing when creating a CustomInfoType, or one of the names listed at https://cloud.google.com/dlp/docs/infotypes-reference when specifying a built-in type.
    sensitivityScore PreventionJobTriggerInspectJobInspectConfigLimitsMaxFindingsPerInfoTypeInfoTypeSensitivityScore
    Optional custom sensitivity for this InfoType. This only applies to data profiling. Structure is documented below.
    version string
    Version of the information type to use. By default, the version is set to stable.
    name str
    Name of the information type. Either a name of your choosing when creating a CustomInfoType, or one of the names listed at https://cloud.google.com/dlp/docs/infotypes-reference when specifying a built-in type.
    sensitivity_score PreventionJobTriggerInspectJobInspectConfigLimitsMaxFindingsPerInfoTypeInfoTypeSensitivityScore
    Optional custom sensitivity for this InfoType. This only applies to data profiling. Structure is documented below.
    version str
    Version of the information type to use. By default, the version is set to stable.
    name String
    Name of the information type. Either a name of your choosing when creating a CustomInfoType, or one of the names listed at https://cloud.google.com/dlp/docs/infotypes-reference when specifying a built-in type.
    sensitivityScore Property Map
    Optional custom sensitivity for this InfoType. This only applies to data profiling. Structure is documented below.
    version String
    Version of the information type to use. By default, the version is set to stable.

    PreventionJobTriggerInspectJobInspectConfigLimitsMaxFindingsPerInfoTypeInfoTypeSensitivityScore, PreventionJobTriggerInspectJobInspectConfigLimitsMaxFindingsPerInfoTypeInfoTypeSensitivityScoreArgs

    Score string
    The sensitivity score applied to the resource. Possible values are: SENSITIVITY_LOW, SENSITIVITY_MODERATE, SENSITIVITY_HIGH.
    Score string
    The sensitivity score applied to the resource. Possible values are: SENSITIVITY_LOW, SENSITIVITY_MODERATE, SENSITIVITY_HIGH.
    score String
    The sensitivity score applied to the resource. Possible values are: SENSITIVITY_LOW, SENSITIVITY_MODERATE, SENSITIVITY_HIGH.
    score string
    The sensitivity score applied to the resource. Possible values are: SENSITIVITY_LOW, SENSITIVITY_MODERATE, SENSITIVITY_HIGH.
    score str
    The sensitivity score applied to the resource. Possible values are: SENSITIVITY_LOW, SENSITIVITY_MODERATE, SENSITIVITY_HIGH.
    score String
    The sensitivity score applied to the resource. Possible values are: SENSITIVITY_LOW, SENSITIVITY_MODERATE, SENSITIVITY_HIGH.

    PreventionJobTriggerInspectJobInspectConfigRuleSet, PreventionJobTriggerInspectJobInspectConfigRuleSetArgs

    Rules List<PreventionJobTriggerInspectJobInspectConfigRuleSetRule>
    Set of rules to be applied to infoTypes. The rules are applied in order. Structure is documented below.
    InfoTypes List<PreventionJobTriggerInspectJobInspectConfigRuleSetInfoType>
    List of infoTypes this rule set is applied to. Structure is documented below.
    Rules []PreventionJobTriggerInspectJobInspectConfigRuleSetRule
    Set of rules to be applied to infoTypes. The rules are applied in order. Structure is documented below.
    InfoTypes []PreventionJobTriggerInspectJobInspectConfigRuleSetInfoType
    List of infoTypes this rule set is applied to. Structure is documented below.
    rules List<PreventionJobTriggerInspectJobInspectConfigRuleSetRule>
    Set of rules to be applied to infoTypes. The rules are applied in order. Structure is documented below.
    infoTypes List<PreventionJobTriggerInspectJobInspectConfigRuleSetInfoType>
    List of infoTypes this rule set is applied to. Structure is documented below.
    rules PreventionJobTriggerInspectJobInspectConfigRuleSetRule[]
    Set of rules to be applied to infoTypes. The rules are applied in order. Structure is documented below.
    infoTypes PreventionJobTriggerInspectJobInspectConfigRuleSetInfoType[]
    List of infoTypes this rule set is applied to. Structure is documented below.
    rules Sequence[PreventionJobTriggerInspectJobInspectConfigRuleSetRule]
    Set of rules to be applied to infoTypes. The rules are applied in order. Structure is documented below.
    info_types Sequence[PreventionJobTriggerInspectJobInspectConfigRuleSetInfoType]
    List of infoTypes this rule set is applied to. Structure is documented below.
    rules List<Property Map>
    Set of rules to be applied to infoTypes. The rules are applied in order. Structure is documented below.
    infoTypes List<Property Map>
    List of infoTypes this rule set is applied to. Structure is documented below.

    PreventionJobTriggerInspectJobInspectConfigRuleSetInfoType, PreventionJobTriggerInspectJobInspectConfigRuleSetInfoTypeArgs

    Name string
    Name of the information type. Either a name of your choosing when creating a CustomInfoType, or one of the names listed at https://cloud.google.com/dlp/docs/infotypes-reference when specifying a built-in type.
    SensitivityScore PreventionJobTriggerInspectJobInspectConfigRuleSetInfoTypeSensitivityScore
    Optional custom sensitivity for this InfoType. This only applies to data profiling. Structure is documented below.
    Version string
    Version of the information type to use. By default, the version is set to stable.
    Name string
    Name of the information type. Either a name of your choosing when creating a CustomInfoType, or one of the names listed at https://cloud.google.com/dlp/docs/infotypes-reference when specifying a built-in type.
    SensitivityScore PreventionJobTriggerInspectJobInspectConfigRuleSetInfoTypeSensitivityScore
    Optional custom sensitivity for this InfoType. This only applies to data profiling. Structure is documented below.
    Version string
    Version of the information type to use. By default, the version is set to stable.
    name String
    Name of the information type. Either a name of your choosing when creating a CustomInfoType, or one of the names listed at https://cloud.google.com/dlp/docs/infotypes-reference when specifying a built-in type.
    sensitivityScore PreventionJobTriggerInspectJobInspectConfigRuleSetInfoTypeSensitivityScore
    Optional custom sensitivity for this InfoType. This only applies to data profiling. Structure is documented below.
    version String
    Version of the information type to use. By default, the version is set to stable.
    name string
    Name of the information type. Either a name of your choosing when creating a CustomInfoType, or one of the names listed at https://cloud.google.com/dlp/docs/infotypes-reference when specifying a built-in type.
    sensitivityScore PreventionJobTriggerInspectJobInspectConfigRuleSetInfoTypeSensitivityScore
    Optional custom sensitivity for this InfoType. This only applies to data profiling. Structure is documented below.
    version string
    Version of the information type to use. By default, the version is set to stable.
    name str
    Name of the information type. Either a name of your choosing when creating a CustomInfoType, or one of the names listed at https://cloud.google.com/dlp/docs/infotypes-reference when specifying a built-in type.
    sensitivity_score PreventionJobTriggerInspectJobInspectConfigRuleSetInfoTypeSensitivityScore
    Optional custom sensitivity for this InfoType. This only applies to data profiling. Structure is documented below.
    version str
    Version of the information type to use. By default, the version is set to stable.
    name String
    Name of the information type. Either a name of your choosing when creating a CustomInfoType, or one of the names listed at https://cloud.google.com/dlp/docs/infotypes-reference when specifying a built-in type.
    sensitivityScore Property Map
    Optional custom sensitivity for this InfoType. This only applies to data profiling. Structure is documented below.
    version String
    Version of the information type to use. By default, the version is set to stable.

    PreventionJobTriggerInspectJobInspectConfigRuleSetInfoTypeSensitivityScore, PreventionJobTriggerInspectJobInspectConfigRuleSetInfoTypeSensitivityScoreArgs

    Score string
    The sensitivity score applied to the resource. Possible values are: SENSITIVITY_LOW, SENSITIVITY_MODERATE, SENSITIVITY_HIGH.
    Score string
    The sensitivity score applied to the resource. Possible values are: SENSITIVITY_LOW, SENSITIVITY_MODERATE, SENSITIVITY_HIGH.
    score String
    The sensitivity score applied to the resource. Possible values are: SENSITIVITY_LOW, SENSITIVITY_MODERATE, SENSITIVITY_HIGH.
    score string
    The sensitivity score applied to the resource. Possible values are: SENSITIVITY_LOW, SENSITIVITY_MODERATE, SENSITIVITY_HIGH.
    score str
    The sensitivity score applied to the resource. Possible values are: SENSITIVITY_LOW, SENSITIVITY_MODERATE, SENSITIVITY_HIGH.
    score String
    The sensitivity score applied to the resource. Possible values are: SENSITIVITY_LOW, SENSITIVITY_MODERATE, SENSITIVITY_HIGH.

    PreventionJobTriggerInspectJobInspectConfigRuleSetRule, PreventionJobTriggerInspectJobInspectConfigRuleSetRuleArgs

    ExclusionRule PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRule
    The rule that specifies conditions when findings of infoTypes specified in InspectionRuleSet are removed from results. Structure is documented below.
    HotwordRule PreventionJobTriggerInspectJobInspectConfigRuleSetRuleHotwordRule
    Hotword-based detection rule. Structure is documented below.
    ExclusionRule PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRule
    The rule that specifies conditions when findings of infoTypes specified in InspectionRuleSet are removed from results. Structure is documented below.
    HotwordRule PreventionJobTriggerInspectJobInspectConfigRuleSetRuleHotwordRule
    Hotword-based detection rule. Structure is documented below.
    exclusionRule PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRule
    The rule that specifies conditions when findings of infoTypes specified in InspectionRuleSet are removed from results. Structure is documented below.
    hotwordRule PreventionJobTriggerInspectJobInspectConfigRuleSetRuleHotwordRule
    Hotword-based detection rule. Structure is documented below.
    exclusionRule PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRule
    The rule that specifies conditions when findings of infoTypes specified in InspectionRuleSet are removed from results. Structure is documented below.
    hotwordRule PreventionJobTriggerInspectJobInspectConfigRuleSetRuleHotwordRule
    Hotword-based detection rule. Structure is documented below.
    exclusion_rule PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRule
    The rule that specifies conditions when findings of infoTypes specified in InspectionRuleSet are removed from results. Structure is documented below.
    hotword_rule PreventionJobTriggerInspectJobInspectConfigRuleSetRuleHotwordRule
    Hotword-based detection rule. Structure is documented below.
    exclusionRule Property Map
    The rule that specifies conditions when findings of infoTypes specified in InspectionRuleSet are removed from results. Structure is documented below.
    hotwordRule Property Map
    Hotword-based detection rule. Structure is documented below.

    PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRule, PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleArgs

    MatchingType string
    How the rule is applied. See the documentation for more information: https://cloud.google.com/dlp/docs/reference/rest/v2/InspectConfig#MatchingType Possible values are: MATCHING_TYPE_FULL_MATCH, MATCHING_TYPE_PARTIAL_MATCH, MATCHING_TYPE_INVERSE_MATCH.
    Dictionary PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleDictionary
    Dictionary which defines the rule. Structure is documented below.
    ExcludeByHotword PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleExcludeByHotword
    Drop if the hotword rule is contained in the proximate context. Structure is documented below.
    ExcludeInfoTypes PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleExcludeInfoTypes
    Set of infoTypes for which findings would affect this rule. Structure is documented below.
    Regex PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleRegex
    Regular expression which defines the rule. Structure is documented below.
    MatchingType string
    How the rule is applied. See the documentation for more information: https://cloud.google.com/dlp/docs/reference/rest/v2/InspectConfig#MatchingType Possible values are: MATCHING_TYPE_FULL_MATCH, MATCHING_TYPE_PARTIAL_MATCH, MATCHING_TYPE_INVERSE_MATCH.
    Dictionary PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleDictionary
    Dictionary which defines the rule. Structure is documented below.
    ExcludeByHotword PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleExcludeByHotword
    Drop if the hotword rule is contained in the proximate context. Structure is documented below.
    ExcludeInfoTypes PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleExcludeInfoTypes
    Set of infoTypes for which findings would affect this rule. Structure is documented below.
    Regex PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleRegex
    Regular expression which defines the rule. Structure is documented below.
    matchingType String
    How the rule is applied. See the documentation for more information: https://cloud.google.com/dlp/docs/reference/rest/v2/InspectConfig#MatchingType Possible values are: MATCHING_TYPE_FULL_MATCH, MATCHING_TYPE_PARTIAL_MATCH, MATCHING_TYPE_INVERSE_MATCH.
    dictionary PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleDictionary
    Dictionary which defines the rule. Structure is documented below.
    excludeByHotword PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleExcludeByHotword
    Drop if the hotword rule is contained in the proximate context. Structure is documented below.
    excludeInfoTypes PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleExcludeInfoTypes
    Set of infoTypes for which findings would affect this rule. Structure is documented below.
    regex PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleRegex
    Regular expression which defines the rule. Structure is documented below.
    matchingType string
    How the rule is applied. See the documentation for more information: https://cloud.google.com/dlp/docs/reference/rest/v2/InspectConfig#MatchingType Possible values are: MATCHING_TYPE_FULL_MATCH, MATCHING_TYPE_PARTIAL_MATCH, MATCHING_TYPE_INVERSE_MATCH.
    dictionary PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleDictionary
    Dictionary which defines the rule. Structure is documented below.
    excludeByHotword PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleExcludeByHotword
    Drop if the hotword rule is contained in the proximate context. Structure is documented below.
    excludeInfoTypes PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleExcludeInfoTypes
    Set of infoTypes for which findings would affect this rule. Structure is documented below.
    regex PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleRegex
    Regular expression which defines the rule. Structure is documented below.
    matching_type str
    How the rule is applied. See the documentation for more information: https://cloud.google.com/dlp/docs/reference/rest/v2/InspectConfig#MatchingType Possible values are: MATCHING_TYPE_FULL_MATCH, MATCHING_TYPE_PARTIAL_MATCH, MATCHING_TYPE_INVERSE_MATCH.
    dictionary PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleDictionary
    Dictionary which defines the rule. Structure is documented below.
    exclude_by_hotword PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleExcludeByHotword
    Drop if the hotword rule is contained in the proximate context. Structure is documented below.
    exclude_info_types PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleExcludeInfoTypes
    Set of infoTypes for which findings would affect this rule. Structure is documented below.
    regex PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleRegex
    Regular expression which defines the rule. Structure is documented below.
    matchingType String
    How the rule is applied. See the documentation for more information: https://cloud.google.com/dlp/docs/reference/rest/v2/InspectConfig#MatchingType Possible values are: MATCHING_TYPE_FULL_MATCH, MATCHING_TYPE_PARTIAL_MATCH, MATCHING_TYPE_INVERSE_MATCH.
    dictionary Property Map
    Dictionary which defines the rule. Structure is documented below.
    excludeByHotword Property Map
    Drop if the hotword rule is contained in the proximate context. Structure is documented below.
    excludeInfoTypes Property Map
    Set of infoTypes for which findings would affect this rule. Structure is documented below.
    regex Property Map
    Regular expression which defines the rule. Structure is documented below.

    PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleDictionary, PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleDictionaryArgs

    CloudStoragePath PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleDictionaryCloudStoragePath
    Newline-delimited file of words in Cloud Storage. Only a single file is accepted. Structure is documented below.
    WordList PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleDictionaryWordList
    List of words or phrases to search for. Structure is documented below.
    CloudStoragePath PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleDictionaryCloudStoragePath
    Newline-delimited file of words in Cloud Storage. Only a single file is accepted. Structure is documented below.
    WordList PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleDictionaryWordList
    List of words or phrases to search for. Structure is documented below.
    cloudStoragePath PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleDictionaryCloudStoragePath
    Newline-delimited file of words in Cloud Storage. Only a single file is accepted. Structure is documented below.
    wordList PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleDictionaryWordList
    List of words or phrases to search for. Structure is documented below.
    cloudStoragePath PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleDictionaryCloudStoragePath
    Newline-delimited file of words in Cloud Storage. Only a single file is accepted. Structure is documented below.
    wordList PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleDictionaryWordList
    List of words or phrases to search for. Structure is documented below.
    cloud_storage_path PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleDictionaryCloudStoragePath
    Newline-delimited file of words in Cloud Storage. Only a single file is accepted. Structure is documented below.
    word_list PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleDictionaryWordList
    List of words or phrases to search for. Structure is documented below.
    cloudStoragePath Property Map
    Newline-delimited file of words in Cloud Storage. Only a single file is accepted. Structure is documented below.
    wordList Property Map
    List of words or phrases to search for. Structure is documented below.

    PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleDictionaryCloudStoragePath, PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleDictionaryCloudStoragePathArgs

    Path string
    A URL representing a file or path (no wildcards) in Cloud Storage. Example: gs://[BUCKET_NAME]/dictionary.txt
    Path string
    A URL representing a file or path (no wildcards) in Cloud Storage. Example: gs://[BUCKET_NAME]/dictionary.txt
    path String
    A URL representing a file or path (no wildcards) in Cloud Storage. Example: gs://[BUCKET_NAME]/dictionary.txt
    path string
    A URL representing a file or path (no wildcards) in Cloud Storage. Example: gs://[BUCKET_NAME]/dictionary.txt
    path str
    A URL representing a file or path (no wildcards) in Cloud Storage. Example: gs://[BUCKET_NAME]/dictionary.txt
    path String
    A URL representing a file or path (no wildcards) in Cloud Storage. Example: gs://[BUCKET_NAME]/dictionary.txt

    PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleDictionaryWordList, PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleDictionaryWordListArgs

    Words List<string>
    Words or phrases defining the dictionary. The dictionary must contain at least one phrase and every phrase must contain at least 2 characters that are letters or digits.
    Words []string
    Words or phrases defining the dictionary. The dictionary must contain at least one phrase and every phrase must contain at least 2 characters that are letters or digits.
    words List<String>
    Words or phrases defining the dictionary. The dictionary must contain at least one phrase and every phrase must contain at least 2 characters that are letters or digits.
    words string[]
    Words or phrases defining the dictionary. The dictionary must contain at least one phrase and every phrase must contain at least 2 characters that are letters or digits.
    words Sequence[str]
    Words or phrases defining the dictionary. The dictionary must contain at least one phrase and every phrase must contain at least 2 characters that are letters or digits.
    words List<String>
    Words or phrases defining the dictionary. The dictionary must contain at least one phrase and every phrase must contain at least 2 characters that are letters or digits.

    PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleExcludeByHotword, PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleExcludeByHotwordArgs

    HotwordRegex PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleExcludeByHotwordHotwordRegex
    Regular expression pattern defining what qualifies as a hotword. Structure is documented below.
    Proximity PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleExcludeByHotwordProximity
    Proximity of the finding within which the entire hotword must reside. The total length of the window cannot exceed 1000 characters. Note that the finding itself will be included in the window, so that hotwords may be used to match substrings of the finding itself. For example, the certainty of a phone number regex (\d{3}) \d{3}-\d{4} could be adjusted upwards if the area code is known to be the local area code of a company office using the hotword regex (xxx), where xxx is the area code in question. Structure is documented below.
    HotwordRegex PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleExcludeByHotwordHotwordRegex
    Regular expression pattern defining what qualifies as a hotword. Structure is documented below.
    Proximity PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleExcludeByHotwordProximity
    Proximity of the finding within which the entire hotword must reside. The total length of the window cannot exceed 1000 characters. Note that the finding itself will be included in the window, so that hotwords may be used to match substrings of the finding itself. For example, the certainty of a phone number regex (\d{3}) \d{3}-\d{4} could be adjusted upwards if the area code is known to be the local area code of a company office using the hotword regex (xxx), where xxx is the area code in question. Structure is documented below.
    hotwordRegex PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleExcludeByHotwordHotwordRegex
    Regular expression pattern defining what qualifies as a hotword. Structure is documented below.
    proximity PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleExcludeByHotwordProximity
    Proximity of the finding within which the entire hotword must reside. The total length of the window cannot exceed 1000 characters. Note that the finding itself will be included in the window, so that hotwords may be used to match substrings of the finding itself. For example, the certainty of a phone number regex (\d{3}) \d{3}-\d{4} could be adjusted upwards if the area code is known to be the local area code of a company office using the hotword regex (xxx), where xxx is the area code in question. Structure is documented below.
    hotwordRegex PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleExcludeByHotwordHotwordRegex
    Regular expression pattern defining what qualifies as a hotword. Structure is documented below.
    proximity PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleExcludeByHotwordProximity
    Proximity of the finding within which the entire hotword must reside. The total length of the window cannot exceed 1000 characters. Note that the finding itself will be included in the window, so that hotwords may be used to match substrings of the finding itself. For example, the certainty of a phone number regex (\d{3}) \d{3}-\d{4} could be adjusted upwards if the area code is known to be the local area code of a company office using the hotword regex (xxx), where xxx is the area code in question. Structure is documented below.
    hotword_regex PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleExcludeByHotwordHotwordRegex
    Regular expression pattern defining what qualifies as a hotword. Structure is documented below.
    proximity PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleExcludeByHotwordProximity
    Proximity of the finding within which the entire hotword must reside. The total length of the window cannot exceed 1000 characters. Note that the finding itself will be included in the window, so that hotwords may be used to match substrings of the finding itself. For example, the certainty of a phone number regex (\d{3}) \d{3}-\d{4} could be adjusted upwards if the area code is known to be the local area code of a company office using the hotword regex (xxx), where xxx is the area code in question. Structure is documented below.
    hotwordRegex Property Map
    Regular expression pattern defining what qualifies as a hotword. Structure is documented below.
    proximity Property Map
    Proximity of the finding within which the entire hotword must reside. The total length of the window cannot exceed 1000 characters. Note that the finding itself will be included in the window, so that hotwords may be used to match substrings of the finding itself. For example, the certainty of a phone number regex (\d{3}) \d{3}-\d{4} could be adjusted upwards if the area code is known to be the local area code of a company office using the hotword regex (xxx), where xxx is the area code in question. Structure is documented below.

    PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleExcludeByHotwordHotwordRegex, PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleExcludeByHotwordHotwordRegexArgs

    GroupIndexes List<int>
    The index of the submatch to extract as findings. When not specified, the entire match is returned. No more than 3 may be included.
    Pattern string
    Pattern defining the regular expression. Its syntax (https://github.com/google/re2/wiki/Syntax) can be found under the google/re2 repository on GitHub.
    GroupIndexes []int
    The index of the submatch to extract as findings. When not specified, the entire match is returned. No more than 3 may be included.
    Pattern string
    Pattern defining the regular expression. Its syntax (https://github.com/google/re2/wiki/Syntax) can be found under the google/re2 repository on GitHub.
    groupIndexes List<Integer>
    The index of the submatch to extract as findings. When not specified, the entire match is returned. No more than 3 may be included.
    pattern String
    Pattern defining the regular expression. Its syntax (https://github.com/google/re2/wiki/Syntax) can be found under the google/re2 repository on GitHub.
    groupIndexes number[]
    The index of the submatch to extract as findings. When not specified, the entire match is returned. No more than 3 may be included.
    pattern string
    Pattern defining the regular expression. Its syntax (https://github.com/google/re2/wiki/Syntax) can be found under the google/re2 repository on GitHub.
    group_indexes Sequence[int]
    The index of the submatch to extract as findings. When not specified, the entire match is returned. No more than 3 may be included.
    pattern str
    Pattern defining the regular expression. Its syntax (https://github.com/google/re2/wiki/Syntax) can be found under the google/re2 repository on GitHub.
    groupIndexes List<Number>
    The index of the submatch to extract as findings. When not specified, the entire match is returned. No more than 3 may be included.
    pattern String
    Pattern defining the regular expression. Its syntax (https://github.com/google/re2/wiki/Syntax) can be found under the google/re2 repository on GitHub.

    PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleExcludeByHotwordProximity, PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleExcludeByHotwordProximityArgs

    WindowAfter int
    Number of characters after the finding to consider. Either this or window_before must be specified.
    WindowBefore int
    Number of characters before the finding to consider. Either this or window_after must be specified.
    WindowAfter int
    Number of characters after the finding to consider. Either this or window_before must be specified.
    WindowBefore int
    Number of characters before the finding to consider. Either this or window_after must be specified.
    windowAfter Integer
    Number of characters after the finding to consider. Either this or window_before must be specified.
    windowBefore Integer
    Number of characters before the finding to consider. Either this or window_after must be specified.
    windowAfter number
    Number of characters after the finding to consider. Either this or window_before must be specified.
    windowBefore number
    Number of characters before the finding to consider. Either this or window_after must be specified.
    window_after int
    Number of characters after the finding to consider. Either this or window_before must be specified.
    window_before int
    Number of characters before the finding to consider. Either this or window_after must be specified.
    windowAfter Number
    Number of characters after the finding to consider. Either this or window_before must be specified.
    windowBefore Number
    Number of characters before the finding to consider. Either this or window_after must be specified.

    PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleExcludeInfoTypes, PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleExcludeInfoTypesArgs

    InfoTypes List<PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleExcludeInfoTypesInfoType>
    If a finding is matched by any of the infoType detectors listed here, the finding will be excluded from the scan results. Structure is documented below.
    InfoTypes []PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleExcludeInfoTypesInfoType
    If a finding is matched by any of the infoType detectors listed here, the finding will be excluded from the scan results. Structure is documented below.
    infoTypes List<PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleExcludeInfoTypesInfoType>
    If a finding is matched by any of the infoType detectors listed here, the finding will be excluded from the scan results. Structure is documented below.
    infoTypes PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleExcludeInfoTypesInfoType[]
    If a finding is matched by any of the infoType detectors listed here, the finding will be excluded from the scan results. Structure is documented below.
    info_types Sequence[PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleExcludeInfoTypesInfoType]
    If a finding is matched by any of the infoType detectors listed here, the finding will be excluded from the scan results. Structure is documented below.
    infoTypes List<Property Map>
    If a finding is matched by any of the infoType detectors listed here, the finding will be excluded from the scan results. Structure is documented below.

    PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleExcludeInfoTypesInfoType, PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleExcludeInfoTypesInfoTypeArgs

    Name string
    Name of the information type. Either a name of your choosing when creating a CustomInfoType, or one of the names listed at https://cloud.google.com/dlp/docs/infotypes-reference when specifying a built-in type.
    SensitivityScore PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleExcludeInfoTypesInfoTypeSensitivityScore
    Optional custom sensitivity for this InfoType. This only applies to data profiling. Structure is documented below.
    Version string
    Version of the information type to use. By default, the version is set to stable.
    Name string
    Name of the information type. Either a name of your choosing when creating a CustomInfoType, or one of the names listed at https://cloud.google.com/dlp/docs/infotypes-reference when specifying a built-in type.
    SensitivityScore PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleExcludeInfoTypesInfoTypeSensitivityScore
    Optional custom sensitivity for this InfoType. This only applies to data profiling. Structure is documented below.
    Version string
    Version of the information type to use. By default, the version is set to stable.
    name String
    Name of the information type. Either a name of your choosing when creating a CustomInfoType, or one of the names listed at https://cloud.google.com/dlp/docs/infotypes-reference when specifying a built-in type.
    sensitivityScore PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleExcludeInfoTypesInfoTypeSensitivityScore
    Optional custom sensitivity for this InfoType. This only applies to data profiling. Structure is documented below.
    version String
    Version of the information type to use. By default, the version is set to stable.
    name string
    Name of the information type. Either a name of your choosing when creating a CustomInfoType, or one of the names listed at https://cloud.google.com/dlp/docs/infotypes-reference when specifying a built-in type.
    sensitivityScore PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleExcludeInfoTypesInfoTypeSensitivityScore
    Optional custom sensitivity for this InfoType. This only applies to data profiling. Structure is documented below.
    version string
    Version of the information type to use. By default, the version is set to stable.
    name str
    Name of the information type. Either a name of your choosing when creating a CustomInfoType, or one of the names listed at https://cloud.google.com/dlp/docs/infotypes-reference when specifying a built-in type.
    sensitivity_score PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleExcludeInfoTypesInfoTypeSensitivityScore
    Optional custom sensitivity for this InfoType. This only applies to data profiling. Structure is documented below.
    version str
    Version of the information type to use. By default, the version is set to stable.
    name String
    Name of the information type. Either a name of your choosing when creating a CustomInfoType, or one of the names listed at https://cloud.google.com/dlp/docs/infotypes-reference when specifying a built-in type.
    sensitivityScore Property Map
    Optional custom sensitivity for this InfoType. This only applies to data profiling. Structure is documented below.
    version String
    Version of the information type to use. By default, the version is set to stable.

    PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleExcludeInfoTypesInfoTypeSensitivityScore, PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleExcludeInfoTypesInfoTypeSensitivityScoreArgs

    Score string
    The sensitivity score applied to the resource. Possible values are: SENSITIVITY_LOW, SENSITIVITY_MODERATE, SENSITIVITY_HIGH.
    Score string
    The sensitivity score applied to the resource. Possible values are: SENSITIVITY_LOW, SENSITIVITY_MODERATE, SENSITIVITY_HIGH.
    score String
    The sensitivity score applied to the resource. Possible values are: SENSITIVITY_LOW, SENSITIVITY_MODERATE, SENSITIVITY_HIGH.
    score string
    The sensitivity score applied to the resource. Possible values are: SENSITIVITY_LOW, SENSITIVITY_MODERATE, SENSITIVITY_HIGH.
    score str
    The sensitivity score applied to the resource. Possible values are: SENSITIVITY_LOW, SENSITIVITY_MODERATE, SENSITIVITY_HIGH.
    score String
    The sensitivity score applied to the resource. Possible values are: SENSITIVITY_LOW, SENSITIVITY_MODERATE, SENSITIVITY_HIGH.

    PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleRegex, PreventionJobTriggerInspectJobInspectConfigRuleSetRuleExclusionRuleRegexArgs

    Pattern string
    Pattern defining the regular expression. Its syntax (https://github.com/google/re2/wiki/Syntax) can be found under the google/re2 repository on GitHub.
    GroupIndexes List<int>
    The index of the submatch to extract as findings. When not specified, the entire match is returned. No more than 3 may be included.
    Pattern string
    Pattern defining the regular expression. Its syntax (https://github.com/google/re2/wiki/Syntax) can be found under the google/re2 repository on GitHub.
    GroupIndexes []int
    The index of the submatch to extract as findings. When not specified, the entire match is returned. No more than 3 may be included.
    pattern String
    Pattern defining the regular expression. Its syntax (https://github.com/google/re2/wiki/Syntax) can be found under the google/re2 repository on GitHub.
    groupIndexes List<Integer>
    The index of the submatch to extract as findings. When not specified, the entire match is returned. No more than 3 may be included.
    pattern string
    Pattern defining the regular expression. Its syntax (https://github.com/google/re2/wiki/Syntax) can be found under the google/re2 repository on GitHub.
    groupIndexes number[]
    The index of the submatch to extract as findings. When not specified, the entire match is returned. No more than 3 may be included.
    pattern str
    Pattern defining the regular expression. Its syntax (https://github.com/google/re2/wiki/Syntax) can be found under the google/re2 repository on GitHub.
    group_indexes Sequence[int]
    The index of the submatch to extract as findings. When not specified, the entire match is returned. No more than 3 may be included.
    pattern String
    Pattern defining the regular expression. Its syntax (https://github.com/google/re2/wiki/Syntax) can be found under the google/re2 repository on GitHub.
    groupIndexes List<Number>
    The index of the submatch to extract as findings. When not specified, the entire match is returned. No more than 3 may be included.

    PreventionJobTriggerInspectJobInspectConfigRuleSetRuleHotwordRule, PreventionJobTriggerInspectJobInspectConfigRuleSetRuleHotwordRuleArgs

    HotwordRegex PreventionJobTriggerInspectJobInspectConfigRuleSetRuleHotwordRuleHotwordRegex
    Regular expression pattern defining what qualifies as a hotword. Structure is documented below.
    LikelihoodAdjustment PreventionJobTriggerInspectJobInspectConfigRuleSetRuleHotwordRuleLikelihoodAdjustment
    Likelihood adjustment to apply to all matching findings. Structure is documented below.
    Proximity PreventionJobTriggerInspectJobInspectConfigRuleSetRuleHotwordRuleProximity
    Proximity of the finding within which the entire hotword must reside. The total length of the window cannot exceed 1000 characters. Note that the finding itself will be included in the window, so that hotwords may be used to match substrings of the finding itself. For example, the certainty of a phone number regex (\d{3}) \d{3}-\d{4} could be adjusted upwards if the area code is known to be the local area code of a company office using the hotword regex (xxx), where xxx is the area code in question. Structure is documented below.
    HotwordRegex PreventionJobTriggerInspectJobInspectConfigRuleSetRuleHotwordRuleHotwordRegex
    Regular expression pattern defining what qualifies as a hotword. Structure is documented below.
    LikelihoodAdjustment PreventionJobTriggerInspectJobInspectConfigRuleSetRuleHotwordRuleLikelihoodAdjustment
    Likelihood adjustment to apply to all matching findings. Structure is documented below.
    Proximity PreventionJobTriggerInspectJobInspectConfigRuleSetRuleHotwordRuleProximity
    Proximity of the finding within which the entire hotword must reside. The total length of the window cannot exceed 1000 characters. Note that the finding itself will be included in the window, so that hotwords may be used to match substrings of the finding itself. For example, the certainty of a phone number regex (\d{3}) \d{3}-\d{4} could be adjusted upwards if the area code is known to be the local area code of a company office using the hotword regex (xxx), where xxx is the area code in question. Structure is documented below.
    hotwordRegex PreventionJobTriggerInspectJobInspectConfigRuleSetRuleHotwordRuleHotwordRegex
    Regular expression pattern defining what qualifies as a hotword. Structure is documented below.
    likelihoodAdjustment PreventionJobTriggerInspectJobInspectConfigRuleSetRuleHotwordRuleLikelihoodAdjustment
    Likelihood adjustment to apply to all matching findings. Structure is documented below.
    proximity PreventionJobTriggerInspectJobInspectConfigRuleSetRuleHotwordRuleProximity
    Proximity of the finding within which the entire hotword must reside. The total length of the window cannot exceed 1000 characters. Note that the finding itself will be included in the window, so that hotwords may be used to match substrings of the finding itself. For example, the certainty of a phone number regex (\d{3}) \d{3}-\d{4} could be adjusted upwards if the area code is known to be the local area code of a company office using the hotword regex (xxx), where xxx is the area code in question. Structure is documented below.
    hotwordRegex PreventionJobTriggerInspectJobInspectConfigRuleSetRuleHotwordRuleHotwordRegex
    Regular expression pattern defining what qualifies as a hotword. Structure is documented below.
    likelihoodAdjustment PreventionJobTriggerInspectJobInspectConfigRuleSetRuleHotwordRuleLikelihoodAdjustment
    Likelihood adjustment to apply to all matching findings. Structure is documented below.
    proximity PreventionJobTriggerInspectJobInspectConfigRuleSetRuleHotwordRuleProximity
    Proximity of the finding within which the entire hotword must reside. The total length of the window cannot exceed 1000 characters. Note that the finding itself will be included in the window, so that hotwords may be used to match substrings of the finding itself. For example, the certainty of a phone number regex (\d{3}) \d{3}-\d{4} could be adjusted upwards if the area code is known to be the local area code of a company office using the hotword regex (xxx), where xxx is the area code in question. Structure is documented below.
    hotword_regex PreventionJobTriggerInspectJobInspectConfigRuleSetRuleHotwordRuleHotwordRegex
    Regular expression pattern defining what qualifies as a hotword. Structure is documented below.
    likelihood_adjustment PreventionJobTriggerInspectJobInspectConfigRuleSetRuleHotwordRuleLikelihoodAdjustment
    Likelihood adjustment to apply to all matching findings. Structure is documented below.
    proximity PreventionJobTriggerInspectJobInspectConfigRuleSetRuleHotwordRuleProximity
    Proximity of the finding within which the entire hotword must reside. The total length of the window cannot exceed 1000 characters. Note that the finding itself will be included in the window, so that hotwords may be used to match substrings of the finding itself. For example, the certainty of a phone number regex (\d{3}) \d{3}-\d{4} could be adjusted upwards if the area code is known to be the local area code of a company office using the hotword regex (xxx), where xxx is the area code in question. Structure is documented below.
    hotwordRegex Property Map
    Regular expression pattern defining what qualifies as a hotword. Structure is documented below.
    likelihoodAdjustment Property Map
    Likelihood adjustment to apply to all matching findings. Structure is documented below.
    proximity Property Map
    Proximity of the finding within which the entire hotword must reside. The total length of the window cannot exceed 1000 characters. Note that the finding itself will be included in the window, so that hotwords may be used to match substrings of the finding itself. For example, the certainty of a phone number regex (\d{3}) \d{3}-\d{4} could be adjusted upwards if the area code is known to be the local area code of a company office using the hotword regex (xxx), where xxx is the area code in question. Structure is documented below.

    PreventionJobTriggerInspectJobInspectConfigRuleSetRuleHotwordRuleHotwordRegex, PreventionJobTriggerInspectJobInspectConfigRuleSetRuleHotwordRuleHotwordRegexArgs

    GroupIndexes List<int>
    The index of the submatch to extract as findings. When not specified, the entire match is returned. No more than 3 may be included.
    Pattern string
    Pattern defining the regular expression. Its syntax (https://github.com/google/re2/wiki/Syntax) can be found under the google/re2 repository on GitHub.
    GroupIndexes []int
    The index of the submatch to extract as findings. When not specified, the entire match is returned. No more than 3 may be included.
    Pattern string
    Pattern defining the regular expression. Its syntax (https://github.com/google/re2/wiki/Syntax) can be found under the google/re2 repository on GitHub.
    groupIndexes List<Integer>
    The index of the submatch to extract as findings. When not specified, the entire match is returned. No more than 3 may be included.
    pattern String
    Pattern defining the regular expression. Its syntax (https://github.com/google/re2/wiki/Syntax) can be found under the google/re2 repository on GitHub.
    groupIndexes number[]
    The index of the submatch to extract as findings. When not specified, the entire match is returned. No more than 3 may be included.
    pattern string
    Pattern defining the regular expression. Its syntax (https://github.com/google/re2/wiki/Syntax) can be found under the google/re2 repository on GitHub.
    group_indexes Sequence[int]
    The index of the submatch to extract as findings. When not specified, the entire match is returned. No more than 3 may be included.
    pattern str
    Pattern defining the regular expression. Its syntax (https://github.com/google/re2/wiki/Syntax) can be found under the google/re2 repository on GitHub.
    groupIndexes List<Number>
    The index of the submatch to extract as findings. When not specified, the entire match is returned. No more than 3 may be included.
    pattern String
    Pattern defining the regular expression. Its syntax (https://github.com/google/re2/wiki/Syntax) can be found under the google/re2 repository on GitHub.

    PreventionJobTriggerInspectJobInspectConfigRuleSetRuleHotwordRuleLikelihoodAdjustment, PreventionJobTriggerInspectJobInspectConfigRuleSetRuleHotwordRuleLikelihoodAdjustmentArgs

    FixedLikelihood string
    Set the likelihood of a finding to a fixed value. Either this or relative_likelihood can be set. Possible values are: VERY_UNLIKELY, UNLIKELY, POSSIBLE, LIKELY, VERY_LIKELY.
    RelativeLikelihood int
    Increase or decrease the likelihood by the specified number of levels. For example, if a finding would be POSSIBLE without the detection rule and relativeLikelihood is 1, then it is upgraded to LIKELY, while a value of -1 would downgrade it to UNLIKELY. Likelihood may never drop below VERY_UNLIKELY or exceed VERY_LIKELY, so applying an adjustment of 1 followed by an adjustment of -1 when base likelihood is VERY_LIKELY will result in a final likelihood of LIKELY. Either this or fixed_likelihood can be set.
    FixedLikelihood string
    Set the likelihood of a finding to a fixed value. Either this or relative_likelihood can be set. Possible values are: VERY_UNLIKELY, UNLIKELY, POSSIBLE, LIKELY, VERY_LIKELY.
    RelativeLikelihood int
    Increase or decrease the likelihood by the specified number of levels. For example, if a finding would be POSSIBLE without the detection rule and relativeLikelihood is 1, then it is upgraded to LIKELY, while a value of -1 would downgrade it to UNLIKELY. Likelihood may never drop below VERY_UNLIKELY or exceed VERY_LIKELY, so applying an adjustment of 1 followed by an adjustment of -1 when base likelihood is VERY_LIKELY will result in a final likelihood of LIKELY. Either this or fixed_likelihood can be set.
    fixedLikelihood String
    Set the likelihood of a finding to a fixed value. Either this or relative_likelihood can be set. Possible values are: VERY_UNLIKELY, UNLIKELY, POSSIBLE, LIKELY, VERY_LIKELY.
    relativeLikelihood Integer
    Increase or decrease the likelihood by the specified number of levels. For example, if a finding would be POSSIBLE without the detection rule and relativeLikelihood is 1, then it is upgraded to LIKELY, while a value of -1 would downgrade it to UNLIKELY. Likelihood may never drop below VERY_UNLIKELY or exceed VERY_LIKELY, so applying an adjustment of 1 followed by an adjustment of -1 when base likelihood is VERY_LIKELY will result in a final likelihood of LIKELY. Either this or fixed_likelihood can be set.
    fixedLikelihood string
    Set the likelihood of a finding to a fixed value. Either this or relative_likelihood can be set. Possible values are: VERY_UNLIKELY, UNLIKELY, POSSIBLE, LIKELY, VERY_LIKELY.
    relativeLikelihood number
    Increase or decrease the likelihood by the specified number of levels. For example, if a finding would be POSSIBLE without the detection rule and relativeLikelihood is 1, then it is upgraded to LIKELY, while a value of -1 would downgrade it to UNLIKELY. Likelihood may never drop below VERY_UNLIKELY or exceed VERY_LIKELY, so applying an adjustment of 1 followed by an adjustment of -1 when base likelihood is VERY_LIKELY will result in a final likelihood of LIKELY. Either this or fixed_likelihood can be set.
    fixed_likelihood str
    Set the likelihood of a finding to a fixed value. Either this or relative_likelihood can be set. Possible values are: VERY_UNLIKELY, UNLIKELY, POSSIBLE, LIKELY, VERY_LIKELY.
    relative_likelihood int
    Increase or decrease the likelihood by the specified number of levels. For example, if a finding would be POSSIBLE without the detection rule and relativeLikelihood is 1, then it is upgraded to LIKELY, while a value of -1 would downgrade it to UNLIKELY. Likelihood may never drop below VERY_UNLIKELY or exceed VERY_LIKELY, so applying an adjustment of 1 followed by an adjustment of -1 when base likelihood is VERY_LIKELY will result in a final likelihood of LIKELY. Either this or fixed_likelihood can be set.
    fixedLikelihood String
    Set the likelihood of a finding to a fixed value. Either this or relative_likelihood can be set. Possible values are: VERY_UNLIKELY, UNLIKELY, POSSIBLE, LIKELY, VERY_LIKELY.
    relativeLikelihood Number
    Increase or decrease the likelihood by the specified number of levels. For example, if a finding would be POSSIBLE without the detection rule and relativeLikelihood is 1, then it is upgraded to LIKELY, while a value of -1 would downgrade it to UNLIKELY. Likelihood may never drop below VERY_UNLIKELY or exceed VERY_LIKELY, so applying an adjustment of 1 followed by an adjustment of -1 when base likelihood is VERY_LIKELY will result in a final likelihood of LIKELY. Either this or fixed_likelihood can be set.

    PreventionJobTriggerInspectJobInspectConfigRuleSetRuleHotwordRuleProximity, PreventionJobTriggerInspectJobInspectConfigRuleSetRuleHotwordRuleProximityArgs

    WindowAfter int
    Number of characters after the finding to consider. Either this or window_before must be specified
    WindowBefore int
    Number of characters before the finding to consider. Either this or window_after must be specified
    WindowAfter int
    Number of characters after the finding to consider. Either this or window_before must be specified
    WindowBefore int
    Number of characters before the finding to consider. Either this or window_after must be specified
    windowAfter Integer
    Number of characters after the finding to consider. Either this or window_before must be specified
    windowBefore Integer
    Number of characters before the finding to consider. Either this or window_after must be specified
    windowAfter number
    Number of characters after the finding to consider. Either this or window_before must be specified
    windowBefore number
    Number of characters before the finding to consider. Either this or window_after must be specified
    window_after int
    Number of characters after the finding to consider. Either this or window_before must be specified
    window_before int
    Number of characters before the finding to consider. Either this or window_after must be specified
    windowAfter Number
    Number of characters after the finding to consider. Either this or window_before must be specified
    windowBefore Number
    Number of characters before the finding to consider. Either this or window_after must be specified

    PreventionJobTriggerInspectJobStorageConfig, PreventionJobTriggerInspectJobStorageConfigArgs

    BigQueryOptions PreventionJobTriggerInspectJobStorageConfigBigQueryOptions
    Options defining BigQuery table and row identifiers. Structure is documented below.
    CloudStorageOptions PreventionJobTriggerInspectJobStorageConfigCloudStorageOptions
    Options defining a file or a set of files within a Google Cloud Storage bucket. Structure is documented below.
    DatastoreOptions PreventionJobTriggerInspectJobStorageConfigDatastoreOptions
    Options defining a data set within Google Cloud Datastore. Structure is documented below.
    HybridOptions PreventionJobTriggerInspectJobStorageConfigHybridOptions
    Configuration to control jobs where the content being inspected is outside of Google Cloud Platform. Structure is documented below.
    TimespanConfig PreventionJobTriggerInspectJobStorageConfigTimespanConfig
    Configuration of the timespan of the items to include in scanning. Structure is documented below.
    BigQueryOptions PreventionJobTriggerInspectJobStorageConfigBigQueryOptions
    Options defining BigQuery table and row identifiers. Structure is documented below.
    CloudStorageOptions PreventionJobTriggerInspectJobStorageConfigCloudStorageOptions
    Options defining a file or a set of files within a Google Cloud Storage bucket. Structure is documented below.
    DatastoreOptions PreventionJobTriggerInspectJobStorageConfigDatastoreOptions
    Options defining a data set within Google Cloud Datastore. Structure is documented below.
    HybridOptions PreventionJobTriggerInspectJobStorageConfigHybridOptions
    Configuration to control jobs where the content being inspected is outside of Google Cloud Platform. Structure is documented below.
    TimespanConfig PreventionJobTriggerInspectJobStorageConfigTimespanConfig
    Configuration of the timespan of the items to include in scanning. Structure is documented below.
    bigQueryOptions PreventionJobTriggerInspectJobStorageConfigBigQueryOptions
    Options defining BigQuery table and row identifiers. Structure is documented below.
    cloudStorageOptions PreventionJobTriggerInspectJobStorageConfigCloudStorageOptions
    Options defining a file or a set of files within a Google Cloud Storage bucket. Structure is documented below.
    datastoreOptions PreventionJobTriggerInspectJobStorageConfigDatastoreOptions
    Options defining a data set within Google Cloud Datastore. Structure is documented below.
    hybridOptions PreventionJobTriggerInspectJobStorageConfigHybridOptions
    Configuration to control jobs where the content being inspected is outside of Google Cloud Platform. Structure is documented below.
    timespanConfig PreventionJobTriggerInspectJobStorageConfigTimespanConfig
    Configuration of the timespan of the items to include in scanning. Structure is documented below.
    bigQueryOptions PreventionJobTriggerInspectJobStorageConfigBigQueryOptions
    Options defining BigQuery table and row identifiers. Structure is documented below.
    cloudStorageOptions PreventionJobTriggerInspectJobStorageConfigCloudStorageOptions
    Options defining a file or a set of files within a Google Cloud Storage bucket. Structure is documented below.
    datastoreOptions PreventionJobTriggerInspectJobStorageConfigDatastoreOptions
    Options defining a data set within Google Cloud Datastore. Structure is documented below.
    hybridOptions PreventionJobTriggerInspectJobStorageConfigHybridOptions
    Configuration to control jobs where the content being inspected is outside of Google Cloud Platform. Structure is documented below.
    timespanConfig PreventionJobTriggerInspectJobStorageConfigTimespanConfig
    Configuration of the timespan of the items to include in scanning. Structure is documented below.
    big_query_options PreventionJobTriggerInspectJobStorageConfigBigQueryOptions
    Options defining BigQuery table and row identifiers. Structure is documented below.
    cloud_storage_options PreventionJobTriggerInspectJobStorageConfigCloudStorageOptions
    Options defining a file or a set of files within a Google Cloud Storage bucket. Structure is documented below.
    datastore_options PreventionJobTriggerInspectJobStorageConfigDatastoreOptions
    Options defining a data set within Google Cloud Datastore. Structure is documented below.
    hybrid_options PreventionJobTriggerInspectJobStorageConfigHybridOptions
    Configuration to control jobs where the content being inspected is outside of Google Cloud Platform. Structure is documented below.
    timespan_config PreventionJobTriggerInspectJobStorageConfigTimespanConfig
    Configuration of the timespan of the items to include in scanning. Structure is documented below.
    bigQueryOptions Property Map
    Options defining BigQuery table and row identifiers. Structure is documented below.
    cloudStorageOptions Property Map
    Options defining a file or a set of files within a Google Cloud Storage bucket. Structure is documented below.
    datastoreOptions Property Map
    Options defining a data set within Google Cloud Datastore. Structure is documented below.
    hybridOptions Property Map
    Configuration to control jobs where the content being inspected is outside of Google Cloud Platform. Structure is documented below.
    timespanConfig Property Map
    Configuration of the timespan of the items to include in scanning. Structure is documented below.

    PreventionJobTriggerInspectJobStorageConfigBigQueryOptions, PreventionJobTriggerInspectJobStorageConfigBigQueryOptionsArgs

    TableReference PreventionJobTriggerInspectJobStorageConfigBigQueryOptionsTableReference
    Specifies the BigQuery table to scan. Structure is documented below.
    ExcludedFields List<PreventionJobTriggerInspectJobStorageConfigBigQueryOptionsExcludedField>
    References to fields excluded from scanning. This allows you to skip inspection of entire columns which you know have no findings. Structure is documented below.
    IdentifyingFields List<PreventionJobTriggerInspectJobStorageConfigBigQueryOptionsIdentifyingField>
    Specifies the BigQuery fields that will be returned with findings. If not specified, no identifying fields will be returned for findings. Structure is documented below.
    IncludedFields List<PreventionJobTriggerInspectJobStorageConfigBigQueryOptionsIncludedField>
    Limit scanning only to these fields. Structure is documented below.
    RowsLimit int
    Max number of rows to scan. If the table has more rows than this value, the rest of the rows are omitted. If not set, or if set to 0, all rows will be scanned. Only one of rowsLimit and rowsLimitPercent can be specified. Cannot be used in conjunction with TimespanConfig.
    RowsLimitPercent int
    Max percentage of rows to scan. The rest are omitted. The number of rows scanned is rounded down. Must be between 0 and 100, inclusive. Both 0 and 100 mean no limit. Defaults to 0. Only one of rowsLimit and rowsLimitPercent can be specified. Cannot be used in conjunction with TimespanConfig.
    SampleMethod string
    How to sample rows if not all rows are scanned. Meaningful only when used in conjunction with either rowsLimit or rowsLimitPercent. If not specified, rows are scanned in the order BigQuery reads them. Default value is TOP. Possible values are: TOP, RANDOM_START.
    TableReference PreventionJobTriggerInspectJobStorageConfigBigQueryOptionsTableReference
    Specifies the BigQuery table to scan. Structure is documented below.
    ExcludedFields []PreventionJobTriggerInspectJobStorageConfigBigQueryOptionsExcludedField
    References to fields excluded from scanning. This allows you to skip inspection of entire columns which you know have no findings. Structure is documented below.
    IdentifyingFields []PreventionJobTriggerInspectJobStorageConfigBigQueryOptionsIdentifyingField
    Specifies the BigQuery fields that will be returned with findings. If not specified, no identifying fields will be returned for findings. Structure is documented below.
    IncludedFields []PreventionJobTriggerInspectJobStorageConfigBigQueryOptionsIncludedField
    Limit scanning only to these fields. Structure is documented below.
    RowsLimit int
    Max number of rows to scan. If the table has more rows than this value, the rest of the rows are omitted. If not set, or if set to 0, all rows will be scanned. Only one of rowsLimit and rowsLimitPercent can be specified. Cannot be used in conjunction with TimespanConfig.
    RowsLimitPercent int
    Max percentage of rows to scan. The rest are omitted. The number of rows scanned is rounded down. Must be between 0 and 100, inclusive. Both 0 and 100 mean no limit. Defaults to 0. Only one of rowsLimit and rowsLimitPercent can be specified. Cannot be used in conjunction with TimespanConfig.
    SampleMethod string
    How to sample rows if not all rows are scanned. Meaningful only when used in conjunction with either rowsLimit or rowsLimitPercent. If not specified, rows are scanned in the order BigQuery reads them. Default value is TOP. Possible values are: TOP, RANDOM_START.
    tableReference PreventionJobTriggerInspectJobStorageConfigBigQueryOptionsTableReference
    Specifies the BigQuery table to scan. Structure is documented below.
    excludedFields List<PreventionJobTriggerInspectJobStorageConfigBigQueryOptionsExcludedField>
    References to fields excluded from scanning. This allows you to skip inspection of entire columns which you know have no findings. Structure is documented below.
    identifyingFields List<PreventionJobTriggerInspectJobStorageConfigBigQueryOptionsIdentifyingField>
    Specifies the BigQuery fields that will be returned with findings. If not specified, no identifying fields will be returned for findings. Structure is documented below.
    includedFields List<PreventionJobTriggerInspectJobStorageConfigBigQueryOptionsIncludedField>
    Limit scanning only to these fields. Structure is documented below.
    rowsLimit Integer
    Max number of rows to scan. If the table has more rows than this value, the rest of the rows are omitted. If not set, or if set to 0, all rows will be scanned. Only one of rowsLimit and rowsLimitPercent can be specified. Cannot be used in conjunction with TimespanConfig.
    rowsLimitPercent Integer
    Max percentage of rows to scan. The rest are omitted. The number of rows scanned is rounded down. Must be between 0 and 100, inclusively. Both 0 and 100 means no limit. Defaults to 0. Only one of rowsLimit and rowsLimitPercent can be specified. Cannot be used in conjunction with TimespanConfig.
    sampleMethod String
    How to sample rows if not all rows are scanned. Meaningful only when used in conjunction with either rowsLimit or rowsLimitPercent. If not specified, rows are scanned in the order BigQuery reads them. Default value is TOP. Possible values are: TOP, RANDOM_START.
    tableReference PreventionJobTriggerInspectJobStorageConfigBigQueryOptionsTableReference
    Reference to the BigQuery table to scan. Structure is documented below.
    excludedFields PreventionJobTriggerInspectJobStorageConfigBigQueryOptionsExcludedField[]
    References to fields excluded from scanning. This allows you to skip inspection of entire columns which you know have no findings. Structure is documented below.
    identifyingFields PreventionJobTriggerInspectJobStorageConfigBigQueryOptionsIdentifyingField[]
    Specifies the BigQuery fields that will be returned with findings. If not specified, no identifying fields will be returned for findings. Structure is documented below.
    includedFields PreventionJobTriggerInspectJobStorageConfigBigQueryOptionsIncludedField[]
    Limit scanning only to these fields. Structure is documented below.
    rowsLimit number
    Max number of rows to scan. If the table has more rows than this value, the rest of the rows are omitted. If not set, or if set to 0, all rows will be scanned. Only one of rowsLimit and rowsLimitPercent can be specified. Cannot be used in conjunction with TimespanConfig.
    rowsLimitPercent number
    Max percentage of rows to scan. The rest are omitted. The number of rows scanned is rounded down. Must be between 0 and 100, inclusively. Both 0 and 100 means no limit. Defaults to 0. Only one of rowsLimit and rowsLimitPercent can be specified. Cannot be used in conjunction with TimespanConfig.
    sampleMethod string
    How to sample rows if not all rows are scanned. Meaningful only when used in conjunction with either rowsLimit or rowsLimitPercent. If not specified, rows are scanned in the order BigQuery reads them. Default value is TOP. Possible values are: TOP, RANDOM_START.
    table_reference PreventionJobTriggerInspectJobStorageConfigBigQueryOptionsTableReference
    Reference to the BigQuery table to scan. Structure is documented below.
    excluded_fields Sequence[PreventionJobTriggerInspectJobStorageConfigBigQueryOptionsExcludedField]
    References to fields excluded from scanning. This allows you to skip inspection of entire columns which you know have no findings. Structure is documented below.
    identifying_fields Sequence[PreventionJobTriggerInspectJobStorageConfigBigQueryOptionsIdentifyingField]
    Specifies the BigQuery fields that will be returned with findings. If not specified, no identifying fields will be returned for findings. Structure is documented below.
    included_fields Sequence[PreventionJobTriggerInspectJobStorageConfigBigQueryOptionsIncludedField]
    Limit scanning only to these fields. Structure is documented below.
    rows_limit int
    Max number of rows to scan. If the table has more rows than this value, the rest of the rows are omitted. If not set, or if set to 0, all rows will be scanned. Only one of rowsLimit and rowsLimitPercent can be specified. Cannot be used in conjunction with TimespanConfig.
    rows_limit_percent int
    Max percentage of rows to scan. The rest are omitted. The number of rows scanned is rounded down. Must be between 0 and 100, inclusively. Both 0 and 100 means no limit. Defaults to 0. Only one of rowsLimit and rowsLimitPercent can be specified. Cannot be used in conjunction with TimespanConfig.
    sample_method str
    How to sample rows if not all rows are scanned. Meaningful only when used in conjunction with either rowsLimit or rowsLimitPercent. If not specified, rows are scanned in the order BigQuery reads them. Default value is TOP. Possible values are: TOP, RANDOM_START.
    tableReference Property Map
    Reference to the BigQuery table to scan. Structure is documented below.
    excludedFields List<Property Map>
    References to fields excluded from scanning. This allows you to skip inspection of entire columns which you know have no findings. Structure is documented below.
    identifyingFields List<Property Map>
    Specifies the BigQuery fields that will be returned with findings. If not specified, no identifying fields will be returned for findings. Structure is documented below.
    includedFields List<Property Map>
    Limit scanning only to these fields. Structure is documented below.
    rowsLimit Number
    Max number of rows to scan. If the table has more rows than this value, the rest of the rows are omitted. If not set, or if set to 0, all rows will be scanned. Only one of rowsLimit and rowsLimitPercent can be specified. Cannot be used in conjunction with TimespanConfig.
    rowsLimitPercent Number
    Max percentage of rows to scan. The rest are omitted. The number of rows scanned is rounded down. Must be between 0 and 100, inclusively. Both 0 and 100 means no limit. Defaults to 0. Only one of rowsLimit and rowsLimitPercent can be specified. Cannot be used in conjunction with TimespanConfig.
    sampleMethod String
    How to sample rows if not all rows are scanned. Meaningful only when used in conjunction with either rowsLimit or rowsLimitPercent. If not specified, rows are scanned in the order BigQuery reads them. Default value is TOP. Possible values are: TOP, RANDOM_START.

    PreventionJobTriggerInspectJobStorageConfigBigQueryOptionsExcludedField, PreventionJobTriggerInspectJobStorageConfigBigQueryOptionsExcludedFieldArgs

    Name string
    Name describing the field excluded from scanning.
    Name string
    Name describing the field excluded from scanning.
    name String
    Name describing the field excluded from scanning.
    name string
    Name describing the field excluded from scanning.
    name str
    Name describing the field excluded from scanning.
    name String
    Name describing the field excluded from scanning.

    PreventionJobTriggerInspectJobStorageConfigBigQueryOptionsIdentifyingField, PreventionJobTriggerInspectJobStorageConfigBigQueryOptionsIdentifyingFieldArgs

    Name string
    Name describing the field.
    Name string
    Name describing the field.
    name String
    Name describing the field.
    name string
    Name describing the field.
    name str
    Name describing the field.
    name String
    Name describing the field.

    PreventionJobTriggerInspectJobStorageConfigBigQueryOptionsIncludedField, PreventionJobTriggerInspectJobStorageConfigBigQueryOptionsIncludedFieldArgs

    Name string
    Name describing the field to which scanning is limited.
    Name string
    Name describing the field to which scanning is limited.
    name String
    Name describing the field to which scanning is limited.
    name string
    Name describing the field to which scanning is limited.
    name str
    Name describing the field to which scanning is limited.
    name String
    Name describing the field to which scanning is limited.

    PreventionJobTriggerInspectJobStorageConfigBigQueryOptionsTableReference, PreventionJobTriggerInspectJobStorageConfigBigQueryOptionsTableReferenceArgs

    DatasetId string
    The dataset ID of the table.
    ProjectId string
    The Google Cloud Platform project ID of the project containing the table.
    TableId string
    The name of the table.
    DatasetId string
    The dataset ID of the table.
    ProjectId string
    The Google Cloud Platform project ID of the project containing the table.
    TableId string
    The name of the table.
    datasetId String
    The dataset ID of the table.
    projectId String
    The Google Cloud Platform project ID of the project containing the table.
    tableId String
    The name of the table.
    datasetId string
    The dataset ID of the table.
    projectId string
    The Google Cloud Platform project ID of the project containing the table.
    tableId string
    The name of the table.
    dataset_id str
    The dataset ID of the table.
    project_id str
    The Google Cloud Platform project ID of the project containing the table.
    table_id str
    The name of the table.
    datasetId String
    The dataset ID of the table.
    projectId String
    The Google Cloud Platform project ID of the project containing the table.
    tableId String
    The name of the table.

    PreventionJobTriggerInspectJobStorageConfigCloudStorageOptions, PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsArgs

    FileSet PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSet
    Set of files to scan. Structure is documented below.
    BytesLimitPerFile int
    Max number of bytes to scan from a file. If a scanned file's size is bigger than this value then the rest of the bytes are omitted.
    BytesLimitPerFilePercent int
    Max percentage of bytes to scan from a file. The rest are omitted. The number of bytes scanned is rounded down. Must be between 0 and 100, inclusively. Both 0 and 100 means no limit.
    FileTypes List<string>
    List of file type groups to include in the scan. If empty, all files are scanned and available data format processors are applied. In addition, the binary content of the selected files is always scanned as well. Images are scanned only as binary if the specified region does not support image inspection and no fileTypes were specified. Each value may be one of: BINARY_FILE, TEXT_FILE, IMAGE, WORD, PDF, AVRO, CSV, TSV, POWERPOINT, EXCEL.
    FilesLimitPercent int
    Limits the number of files to scan to this percentage of the input FileSet. Number of files scanned is rounded down. Must be between 0 and 100, inclusively. Both 0 and 100 means no limit.
    SampleMethod string
    How to sample bytes if not all bytes are scanned. Meaningful only when used in conjunction with bytesLimitPerFile. If not specified, scanning would start from the top. Possible values are: TOP, RANDOM_START.
    FileSet PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSet
    Set of files to scan. Structure is documented below.
    BytesLimitPerFile int
    Max number of bytes to scan from a file. If a scanned file's size is bigger than this value then the rest of the bytes are omitted.
    BytesLimitPerFilePercent int
    Max percentage of bytes to scan from a file. The rest are omitted. The number of bytes scanned is rounded down. Must be between 0 and 100, inclusively. Both 0 and 100 means no limit.
    FileTypes []string
    List of file type groups to include in the scan. If empty, all files are scanned and available data format processors are applied. In addition, the binary content of the selected files is always scanned as well. Images are scanned only as binary if the specified region does not support image inspection and no fileTypes were specified. Each value may be one of: BINARY_FILE, TEXT_FILE, IMAGE, WORD, PDF, AVRO, CSV, TSV, POWERPOINT, EXCEL.
    FilesLimitPercent int
    Limits the number of files to scan to this percentage of the input FileSet. Number of files scanned is rounded down. Must be between 0 and 100, inclusively. Both 0 and 100 means no limit.
    SampleMethod string
    How to sample bytes if not all bytes are scanned. Meaningful only when used in conjunction with bytesLimitPerFile. If not specified, scanning would start from the top. Possible values are: TOP, RANDOM_START.
    fileSet PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSet
    Set of files to scan. Structure is documented below.
    bytesLimitPerFile Integer
    Max number of bytes to scan from a file. If a scanned file's size is bigger than this value then the rest of the bytes are omitted.
    bytesLimitPerFilePercent Integer
    Max percentage of bytes to scan from a file. The rest are omitted. The number of bytes scanned is rounded down. Must be between 0 and 100, inclusively. Both 0 and 100 means no limit.
    fileTypes List<String>
    List of file type groups to include in the scan. If empty, all files are scanned and available data format processors are applied. In addition, the binary content of the selected files is always scanned as well. Images are scanned only as binary if the specified region does not support image inspection and no fileTypes were specified. Each value may be one of: BINARY_FILE, TEXT_FILE, IMAGE, WORD, PDF, AVRO, CSV, TSV, POWERPOINT, EXCEL.
    filesLimitPercent Integer
    Limits the number of files to scan to this percentage of the input FileSet. Number of files scanned is rounded down. Must be between 0 and 100, inclusively. Both 0 and 100 means no limit.
    sampleMethod String
    How to sample bytes if not all bytes are scanned. Meaningful only when used in conjunction with bytesLimitPerFile. If not specified, scanning would start from the top. Possible values are: TOP, RANDOM_START.
    fileSet PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSet
    Set of files to scan. Structure is documented below.
    bytesLimitPerFile number
    Max number of bytes to scan from a file. If a scanned file's size is bigger than this value then the rest of the bytes are omitted.
    bytesLimitPerFilePercent number
    Max percentage of bytes to scan from a file. The rest are omitted. The number of bytes scanned is rounded down. Must be between 0 and 100, inclusively. Both 0 and 100 means no limit.
    fileTypes string[]
    List of file type groups to include in the scan. If empty, all files are scanned and available data format processors are applied. In addition, the binary content of the selected files is always scanned as well. Images are scanned only as binary if the specified region does not support image inspection and no fileTypes were specified. Each value may be one of: BINARY_FILE, TEXT_FILE, IMAGE, WORD, PDF, AVRO, CSV, TSV, POWERPOINT, EXCEL.
    filesLimitPercent number
    Limits the number of files to scan to this percentage of the input FileSet. Number of files scanned is rounded down. Must be between 0 and 100, inclusively. Both 0 and 100 means no limit.
    sampleMethod string
    How to sample bytes if not all bytes are scanned. Meaningful only when used in conjunction with bytesLimitPerFile. If not specified, scanning would start from the top. Possible values are: TOP, RANDOM_START.
    file_set PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSet
    Set of files to scan. Structure is documented below.
    bytes_limit_per_file int
    Max number of bytes to scan from a file. If a scanned file's size is bigger than this value then the rest of the bytes are omitted.
    bytes_limit_per_file_percent int
    Max percentage of bytes to scan from a file. The rest are omitted. The number of bytes scanned is rounded down. Must be between 0 and 100, inclusively. Both 0 and 100 means no limit.
    file_types Sequence[str]
    List of file type groups to include in the scan. If empty, all files are scanned and available data format processors are applied. In addition, the binary content of the selected files is always scanned as well. Images are scanned only as binary if the specified region does not support image inspection and no fileTypes were specified. Each value may be one of: BINARY_FILE, TEXT_FILE, IMAGE, WORD, PDF, AVRO, CSV, TSV, POWERPOINT, EXCEL.
    files_limit_percent int
    Limits the number of files to scan to this percentage of the input FileSet. Number of files scanned is rounded down. Must be between 0 and 100, inclusively. Both 0 and 100 means no limit.
    sample_method str
    How to sample bytes if not all bytes are scanned. Meaningful only when used in conjunction with bytesLimitPerFile. If not specified, scanning would start from the top. Possible values are: TOP, RANDOM_START.
    fileSet Property Map
    Set of files to scan. Structure is documented below.
    bytesLimitPerFile Number
    Max number of bytes to scan from a file. If a scanned file's size is bigger than this value then the rest of the bytes are omitted.
    bytesLimitPerFilePercent Number
    Max percentage of bytes to scan from a file. The rest are omitted. The number of bytes scanned is rounded down. Must be between 0 and 100, inclusively. Both 0 and 100 means no limit.
    fileTypes List<String>
    List of file type groups to include in the scan. If empty, all files are scanned and available data format processors are applied. In addition, the binary content of the selected files is always scanned as well. Images are scanned only as binary if the specified region does not support image inspection and no fileTypes were specified. Each value may be one of: BINARY_FILE, TEXT_FILE, IMAGE, WORD, PDF, AVRO, CSV, TSV, POWERPOINT, EXCEL.
    filesLimitPercent Number
    Limits the number of files to scan to this percentage of the input FileSet. Number of files scanned is rounded down. Must be between 0 and 100, inclusively. Both 0 and 100 means no limit.
    sampleMethod String
    How to sample bytes if not all bytes are scanned. Meaningful only when used in conjunction with bytesLimitPerFile. If not specified, scanning would start from the top. Possible values are: TOP, RANDOM_START.

    PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSet, PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetArgs

    RegexFileSet PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetRegexFileSet
    The regex-filtered set of files to scan. Structure is documented below.
    Url string
    The Cloud Storage url of the file(s) to scan, in the format gs://<bucket>/<path>. Trailing wildcard in the path is allowed. If the url ends in a trailing slash, the bucket or directory represented by the url will be scanned non-recursively (content in sub-directories will not be scanned). This means that gs://mybucket/ is equivalent to gs://mybucket/*, and gs://mybucket/directory/ is equivalent to gs://mybucket/directory/*.
    RegexFileSet PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetRegexFileSet
    The regex-filtered set of files to scan. Structure is documented below.
    Url string
    The Cloud Storage url of the file(s) to scan, in the format gs://<bucket>/<path>. Trailing wildcard in the path is allowed. If the url ends in a trailing slash, the bucket or directory represented by the url will be scanned non-recursively (content in sub-directories will not be scanned). This means that gs://mybucket/ is equivalent to gs://mybucket/*, and gs://mybucket/directory/ is equivalent to gs://mybucket/directory/*.
    regexFileSet PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetRegexFileSet
    The regex-filtered set of files to scan. Structure is documented below.
    url String
    The Cloud Storage url of the file(s) to scan, in the format gs://<bucket>/<path>. Trailing wildcard in the path is allowed. If the url ends in a trailing slash, the bucket or directory represented by the url will be scanned non-recursively (content in sub-directories will not be scanned). This means that gs://mybucket/ is equivalent to gs://mybucket/*, and gs://mybucket/directory/ is equivalent to gs://mybucket/directory/*.
    regexFileSet PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetRegexFileSet
    The regex-filtered set of files to scan. Structure is documented below.
    url string
    The Cloud Storage url of the file(s) to scan, in the format gs://<bucket>/<path>. Trailing wildcard in the path is allowed. If the url ends in a trailing slash, the bucket or directory represented by the url will be scanned non-recursively (content in sub-directories will not be scanned). This means that gs://mybucket/ is equivalent to gs://mybucket/*, and gs://mybucket/directory/ is equivalent to gs://mybucket/directory/*.
    regex_file_set PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetRegexFileSet
    The regex-filtered set of files to scan. Structure is documented below.
    url str
    The Cloud Storage url of the file(s) to scan, in the format gs://<bucket>/<path>. Trailing wildcard in the path is allowed. If the url ends in a trailing slash, the bucket or directory represented by the url will be scanned non-recursively (content in sub-directories will not be scanned). This means that gs://mybucket/ is equivalent to gs://mybucket/*, and gs://mybucket/directory/ is equivalent to gs://mybucket/directory/*.
    regexFileSet Property Map
    The regex-filtered set of files to scan. Structure is documented below.
    url String
    The Cloud Storage url of the file(s) to scan, in the format gs://<bucket>/<path>. Trailing wildcard in the path is allowed. If the url ends in a trailing slash, the bucket or directory represented by the url will be scanned non-recursively (content in sub-directories will not be scanned). This means that gs://mybucket/ is equivalent to gs://mybucket/*, and gs://mybucket/directory/ is equivalent to gs://mybucket/directory/*.

    PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetRegexFileSet, PreventionJobTriggerInspectJobStorageConfigCloudStorageOptionsFileSetRegexFileSetArgs

    BucketName string
    The name of a Cloud Storage bucket.
    ExcludeRegexes List<string>
    A list of regular expressions matching file paths to exclude. All files in the bucket that match at least one of these regular expressions will be excluded from the scan.
    IncludeRegexes List<string>
    A list of regular expressions matching file paths to include. All files in the bucket that match at least one of these regular expressions will be included in the set of files, except for those that also match an item in excludeRegex. Leaving this field empty will match all files by default (this is equivalent to including .* in the list)
    BucketName string
    The name of a Cloud Storage bucket.
    ExcludeRegexes []string
    A list of regular expressions matching file paths to exclude. All files in the bucket that match at least one of these regular expressions will be excluded from the scan.
    IncludeRegexes []string
    A list of regular expressions matching file paths to include. All files in the bucket that match at least one of these regular expressions will be included in the set of files, except for those that also match an item in excludeRegex. Leaving this field empty will match all files by default (this is equivalent to including .* in the list)
    bucketName String
    The name of a Cloud Storage bucket.
    excludeRegexes List<String>
    A list of regular expressions matching file paths to exclude. All files in the bucket that match at least one of these regular expressions will be excluded from the scan.
    includeRegexes List<String>
    A list of regular expressions matching file paths to include. All files in the bucket that match at least one of these regular expressions will be included in the set of files, except for those that also match an item in excludeRegex. Leaving this field empty will match all files by default (this is equivalent to including .* in the list)
    bucketName string
    The name of a Cloud Storage bucket.
    excludeRegexes string[]
    A list of regular expressions matching file paths to exclude. All files in the bucket that match at least one of these regular expressions will be excluded from the scan.
    includeRegexes string[]
    A list of regular expressions matching file paths to include. All files in the bucket that match at least one of these regular expressions will be included in the set of files, except for those that also match an item in excludeRegex. Leaving this field empty will match all files by default (this is equivalent to including .* in the list)
    bucket_name str
    The name of a Cloud Storage bucket.
    exclude_regexes Sequence[str]
    A list of regular expressions matching file paths to exclude. All files in the bucket that match at least one of these regular expressions will be excluded from the scan.
    include_regexes Sequence[str]
    A list of regular expressions matching file paths to include. All files in the bucket that match at least one of these regular expressions will be included in the set of files, except for those that also match an item in excludeRegex. Leaving this field empty will match all files by default (this is equivalent to including .* in the list)
    bucketName String
    The name of a Cloud Storage bucket.
    excludeRegexes List<String>
    A list of regular expressions matching file paths to exclude. All files in the bucket that match at least one of these regular expressions will be excluded from the scan.
    includeRegexes List<String>
    A list of regular expressions matching file paths to include. All files in the bucket that match at least one of these regular expressions will be included in the set of files, except for those that also match an item in excludeRegex. Leaving this field empty will match all files by default (this is equivalent to including .* in the list)

    PreventionJobTriggerInspectJobStorageConfigDatastoreOptions, PreventionJobTriggerInspectJobStorageConfigDatastoreOptionsArgs

    Kind PreventionJobTriggerInspectJobStorageConfigDatastoreOptionsKind
    A representation of a Datastore kind. Structure is documented below.
    PartitionId PreventionJobTriggerInspectJobStorageConfigDatastoreOptionsPartitionId
    Datastore partition ID. A partition ID identifies a grouping of entities. The grouping is always by project and namespace, however the namespace ID may be empty. Structure is documented below.
    Kind PreventionJobTriggerInspectJobStorageConfigDatastoreOptionsKind
    A representation of a Datastore kind. Structure is documented below.
    PartitionId PreventionJobTriggerInspectJobStorageConfigDatastoreOptionsPartitionId
    Datastore partition ID. A partition ID identifies a grouping of entities. The grouping is always by project and namespace, however the namespace ID may be empty. Structure is documented below.
    kind PreventionJobTriggerInspectJobStorageConfigDatastoreOptionsKind
    A representation of a Datastore kind. Structure is documented below.
    partitionId PreventionJobTriggerInspectJobStorageConfigDatastoreOptionsPartitionId
    Datastore partition ID. A partition ID identifies a grouping of entities. The grouping is always by project and namespace, however the namespace ID may be empty. Structure is documented below.
    kind PreventionJobTriggerInspectJobStorageConfigDatastoreOptionsKind
    A representation of a Datastore kind. Structure is documented below.
    partitionId PreventionJobTriggerInspectJobStorageConfigDatastoreOptionsPartitionId
    Datastore partition ID. A partition ID identifies a grouping of entities. The grouping is always by project and namespace, however the namespace ID may be empty. Structure is documented below.
    kind PreventionJobTriggerInspectJobStorageConfigDatastoreOptionsKind
    A representation of a Datastore kind. Structure is documented below.
    partition_id PreventionJobTriggerInspectJobStorageConfigDatastoreOptionsPartitionId
    Datastore partition ID. A partition ID identifies a grouping of entities. The grouping is always by project and namespace, however the namespace ID may be empty. Structure is documented below.
    kind Property Map
    A representation of a Datastore kind. Structure is documented below.
    partitionId Property Map
    Datastore partition ID. A partition ID identifies a grouping of entities. The grouping is always by project and namespace, however the namespace ID may be empty. Structure is documented below.

    PreventionJobTriggerInspectJobStorageConfigDatastoreOptionsKind, PreventionJobTriggerInspectJobStorageConfigDatastoreOptionsKindArgs

    Name string
    The name of the Datastore kind.
    Name string
    The name of the Datastore kind.
    name String
    The name of the Datastore kind.
    name string
    The name of the Datastore kind.
    name str
    The name of the Datastore kind.
    name String
    The name of the Datastore kind.

    PreventionJobTriggerInspectJobStorageConfigDatastoreOptionsPartitionId, PreventionJobTriggerInspectJobStorageConfigDatastoreOptionsPartitionIdArgs

    ProjectId string
    The ID of the project to which the entities belong.
    NamespaceId string
    If not empty, the ID of the namespace to which the entities belong.
    ProjectId string
    The ID of the project to which the entities belong.
    NamespaceId string
    If not empty, the ID of the namespace to which the entities belong.
    projectId String
    The ID of the project to which the entities belong.
    namespaceId String
    If not empty, the ID of the namespace to which the entities belong.
    projectId string
    The ID of the project to which the entities belong.
    namespaceId string
    If not empty, the ID of the namespace to which the entities belong.
    project_id str
    The ID of the project to which the entities belong.
    namespace_id str
    If not empty, the ID of the namespace to which the entities belong.
    projectId String
    The ID of the project to which the entities belong.
    namespaceId String
    If not empty, the ID of the namespace to which the entities belong.

    PreventionJobTriggerInspectJobStorageConfigHybridOptions, PreventionJobTriggerInspectJobStorageConfigHybridOptionsArgs

    Description string
    A short description of where the data is coming from. Will be stored once in the job. 256 max length.
    Labels Dictionary<string, string>
    To organize findings, these labels will be added to each finding. Label keys must be between 1 and 63 characters long and must conform to the following regular expression: [a-z]([-a-z0-9]*[a-z0-9])?. Label values must be between 0 and 63 characters long and must conform to the regular expression ([a-z]([-a-z0-9]*[a-z0-9])?)?. No more than 10 labels can be associated with a given finding. Examples:

    • "environment" : "production"
    • "pipeline" : "etl"
    RequiredFindingLabelKeys List<string>
    These are labels that each inspection request must include within their 'finding_labels' map. Request may contain others, but any missing one of these will be rejected. Label keys must be between 1 and 63 characters long and must conform to the following regular expression: [a-z]([-a-z0-9]*[a-z0-9])?. No more than 10 keys can be required.
    TableOptions PreventionJobTriggerInspectJobStorageConfigHybridOptionsTableOptions
    If the container is a table, additional information to make findings meaningful such as the columns that are primary keys. Structure is documented below.
    Description string
    A short description of where the data is coming from. Will be stored once in the job. 256 max length.
    Labels map[string]string
    To organize findings, these labels will be added to each finding. Label keys must be between 1 and 63 characters long and must conform to the following regular expression: [a-z]([-a-z0-9]*[a-z0-9])?. Label values must be between 0 and 63 characters long and must conform to the regular expression ([a-z]([-a-z0-9]*[a-z0-9])?)?. No more than 10 labels can be associated with a given finding. Examples:

    • "environment" : "production"
    • "pipeline" : "etl"
    RequiredFindingLabelKeys []string
    These are labels that each inspection request must include within their 'finding_labels' map. Request may contain others, but any missing one of these will be rejected. Label keys must be between 1 and 63 characters long and must conform to the following regular expression: [a-z]([-a-z0-9]*[a-z0-9])?. No more than 10 keys can be required.
    TableOptions PreventionJobTriggerInspectJobStorageConfigHybridOptionsTableOptions
    If the container is a table, additional information to make findings meaningful such as the columns that are primary keys. Structure is documented below.
    description String
    A short description of where the data is coming from. Will be stored once in the job. 256 max length.
    labels Map<String,String>
    To organize findings, these labels will be added to each finding. Label keys must be between 1 and 63 characters long and must conform to the following regular expression: `[a-z]([-a-z0-9]*[a-z0-9])?`. Label values must be between 0 and 63 characters long and must conform to the regular expression `([a-z]([-a-z0-9]*[a-z0-9])?)?`. No more than 10 labels can be associated with a given finding. Examples:

    • "environment" : "production"
    • "pipeline" : "etl"
    requiredFindingLabelKeys List<String>
    These are labels that each inspection request must include within their 'finding_labels' map. Request may contain others, but any missing one of these will be rejected. Label keys must be between 1 and 63 characters long and must conform to the following regular expression: `[a-z]([-a-z0-9]*[a-z0-9])?`. No more than 10 keys can be required.
    tableOptions PreventionJobTriggerInspectJobStorageConfigHybridOptionsTableOptions
    If the container is a table, additional information to make findings meaningful such as the columns that are primary keys. Structure is documented below.
    description string
    A short description of where the data is coming from. Will be stored once in the job. 256 max length.
    labels {[key: string]: string}
    To organize findings, these labels will be added to each finding. Label keys must be between 1 and 63 characters long and must conform to the following regular expression: `[a-z]([-a-z0-9]*[a-z0-9])?`. Label values must be between 0 and 63 characters long and must conform to the regular expression `([a-z]([-a-z0-9]*[a-z0-9])?)?`. No more than 10 labels can be associated with a given finding. Examples:

    • "environment" : "production"
    • "pipeline" : "etl"
    requiredFindingLabelKeys string[]
    These are labels that each inspection request must include within their 'finding_labels' map. Request may contain others, but any missing one of these will be rejected. Label keys must be between 1 and 63 characters long and must conform to the following regular expression: `[a-z]([-a-z0-9]*[a-z0-9])?`. No more than 10 keys can be required.
    tableOptions PreventionJobTriggerInspectJobStorageConfigHybridOptionsTableOptions
    If the container is a table, additional information to make findings meaningful such as the columns that are primary keys. Structure is documented below.
    description str
    A short description of where the data is coming from. Will be stored once in the job. 256 max length.
    labels Mapping[str, str]
    To organize findings, these labels will be added to each finding. Label keys must be between 1 and 63 characters long and must conform to the following regular expression: `[a-z]([-a-z0-9]*[a-z0-9])?`. Label values must be between 0 and 63 characters long and must conform to the regular expression `([a-z]([-a-z0-9]*[a-z0-9])?)?`. No more than 10 labels can be associated with a given finding. Examples:

    • "environment" : "production"
    • "pipeline" : "etl"
    required_finding_label_keys Sequence[str]
    These are labels that each inspection request must include within their 'finding_labels' map. Request may contain others, but any missing one of these will be rejected. Label keys must be between 1 and 63 characters long and must conform to the following regular expression: `[a-z]([-a-z0-9]*[a-z0-9])?`. No more than 10 keys can be required.
    table_options PreventionJobTriggerInspectJobStorageConfigHybridOptionsTableOptions
    If the container is a table, additional information to make findings meaningful such as the columns that are primary keys. Structure is documented below.
    description String
    A short description of where the data is coming from. Will be stored once in the job. 256 max length.
    labels Map<String>
    To organize findings, these labels will be added to each finding. Label keys must be between 1 and 63 characters long and must conform to the following regular expression: `[a-z]([-a-z0-9]*[a-z0-9])?`. Label values must be between 0 and 63 characters long and must conform to the regular expression `([a-z]([-a-z0-9]*[a-z0-9])?)?`. No more than 10 labels can be associated with a given finding. Examples:

    • "environment" : "production"
    • "pipeline" : "etl"
    requiredFindingLabelKeys List<String>
    These are labels that each inspection request must include within their 'finding_labels' map. Request may contain others, but any missing one of these will be rejected. Label keys must be between 1 and 63 characters long and must conform to the following regular expression: `[a-z]([-a-z0-9]*[a-z0-9])?`. No more than 10 keys can be required.
    tableOptions Property Map
    If the container is a table, additional information to make findings meaningful such as the columns that are primary keys. Structure is documented below.

    PreventionJobTriggerInspectJobStorageConfigHybridOptionsTableOptions, PreventionJobTriggerInspectJobStorageConfigHybridOptionsTableOptionsArgs

    IdentifyingFields List<PreventionJobTriggerInspectJobStorageConfigHybridOptionsTableOptionsIdentifyingField>
    The columns that are the primary keys for table objects included in ContentItem. A copy of this cell's value will be stored alongside each finding so that the finding can be traced to the specific row it came from. No more than 3 may be provided. Structure is documented below.
    IdentifyingFields []PreventionJobTriggerInspectJobStorageConfigHybridOptionsTableOptionsIdentifyingField
    The columns that are the primary keys for table objects included in ContentItem. A copy of this cell's value will be stored alongside each finding so that the finding can be traced to the specific row it came from. No more than 3 may be provided. Structure is documented below.
    identifyingFields List<PreventionJobTriggerInspectJobStorageConfigHybridOptionsTableOptionsIdentifyingField>
    The columns that are the primary keys for table objects included in ContentItem. A copy of this cell's value will be stored alongside each finding so that the finding can be traced to the specific row it came from. No more than 3 may be provided. Structure is documented below.
    identifyingFields PreventionJobTriggerInspectJobStorageConfigHybridOptionsTableOptionsIdentifyingField[]
    The columns that are the primary keys for table objects included in ContentItem. A copy of this cell's value will be stored alongside each finding so that the finding can be traced to the specific row it came from. No more than 3 may be provided. Structure is documented below.
    identifying_fields Sequence[PreventionJobTriggerInspectJobStorageConfigHybridOptionsTableOptionsIdentifyingField]
    The columns that are the primary keys for table objects included in ContentItem. A copy of this cell's value will be stored alongside each finding so that the finding can be traced to the specific row it came from. No more than 3 may be provided. Structure is documented below.
    identifyingFields List<Property Map>
    The columns that are the primary keys for table objects included in ContentItem. A copy of this cell's value will be stored alongside each finding so that the finding can be traced to the specific row it came from. No more than 3 may be provided. Structure is documented below.

    PreventionJobTriggerInspectJobStorageConfigHybridOptionsTableOptionsIdentifyingField, PreventionJobTriggerInspectJobStorageConfigHybridOptionsTableOptionsIdentifyingFieldArgs

    Name string
    Name describing the field.
    Name string
    Name describing the field.
    name String
    Name describing the field.
    name string
    Name describing the field.
    name str
    Name describing the field.
    name String
    Name describing the field.

    PreventionJobTriggerInspectJobStorageConfigTimespanConfig, PreventionJobTriggerInspectJobStorageConfigTimespanConfigArgs

    EnableAutoPopulationOfTimespanConfig bool
    When the job is started by a JobTrigger we will automatically figure out a valid startTime to avoid scanning files that have not been modified since the last time the JobTrigger executed. This will be based on the time of the execution of the last run of the JobTrigger or the timespan endTime used in the last run of the JobTrigger.
    EndTime string
    Exclude files, tables, or rows newer than this value. If not set, no upper time limit is applied.
    StartTime string
    Exclude files, tables, or rows older than this value. If not set, no lower time limit is applied.
    TimestampField PreventionJobTriggerInspectJobStorageConfigTimespanConfigTimestampField
    Specification of the field containing the timestamp of scanned items. Structure is documented below.
    EnableAutoPopulationOfTimespanConfig bool
    When the job is started by a JobTrigger we will automatically figure out a valid startTime to avoid scanning files that have not been modified since the last time the JobTrigger executed. This will be based on the time of the execution of the last run of the JobTrigger or the timespan endTime used in the last run of the JobTrigger.
    EndTime string
    Exclude files, tables, or rows newer than this value. If not set, no upper time limit is applied.
    StartTime string
    Exclude files, tables, or rows older than this value. If not set, no lower time limit is applied.
    TimestampField PreventionJobTriggerInspectJobStorageConfigTimespanConfigTimestampField
    Specification of the field containing the timestamp of scanned items. Structure is documented below.
    enableAutoPopulationOfTimespanConfig Boolean
    When the job is started by a JobTrigger we will automatically figure out a valid startTime to avoid scanning files that have not been modified since the last time the JobTrigger executed. This will be based on the time of the execution of the last run of the JobTrigger or the timespan endTime used in the last run of the JobTrigger.
    endTime String
    Exclude files, tables, or rows newer than this value. If not set, no upper time limit is applied.
    startTime String
    Exclude files, tables, or rows older than this value. If not set, no lower time limit is applied.
    timestampField PreventionJobTriggerInspectJobStorageConfigTimespanConfigTimestampField
    Specification of the field containing the timestamp of scanned items. Structure is documented below.
    enableAutoPopulationOfTimespanConfig boolean
    When the job is started by a JobTrigger we will automatically figure out a valid startTime to avoid scanning files that have not been modified since the last time the JobTrigger executed. This will be based on the time of the execution of the last run of the JobTrigger or the timespan endTime used in the last run of the JobTrigger.
    endTime string
    Exclude files, tables, or rows newer than this value. If not set, no upper time limit is applied.
    startTime string
    Exclude files, tables, or rows older than this value. If not set, no lower time limit is applied.
    timestampField PreventionJobTriggerInspectJobStorageConfigTimespanConfigTimestampField
    Specification of the field containing the timestamp of scanned items. Structure is documented below.
    enable_auto_population_of_timespan_config bool
    When the job is started by a JobTrigger we will automatically figure out a valid startTime to avoid scanning files that have not been modified since the last time the JobTrigger executed. This will be based on the time of the execution of the last run of the JobTrigger or the timespan endTime used in the last run of the JobTrigger.
    end_time str
    Exclude files, tables, or rows newer than this value. If not set, no upper time limit is applied.
    start_time str
    Exclude files, tables, or rows older than this value. If not set, no lower time limit is applied.
    timestamp_field PreventionJobTriggerInspectJobStorageConfigTimespanConfigTimestampField
    Specification of the field containing the timestamp of scanned items. Structure is documented below.
    enableAutoPopulationOfTimespanConfig Boolean
    When the job is started by a JobTrigger we will automatically figure out a valid startTime to avoid scanning files that have not been modified since the last time the JobTrigger executed. This will be based on the time of the execution of the last run of the JobTrigger or the timespan endTime used in the last run of the JobTrigger.
    endTime String
    Exclude files, tables, or rows newer than this value. If not set, no upper time limit is applied.
    startTime String
    Exclude files, tables, or rows older than this value. If not set, no lower time limit is applied.
    timestampField Property Map
    Specification of the field containing the timestamp of scanned items. Structure is documented below.

    PreventionJobTriggerInspectJobStorageConfigTimespanConfigTimestampField, PreventionJobTriggerInspectJobStorageConfigTimespanConfigTimestampFieldArgs

    Name string
    Specification of the field containing the timestamp of scanned items. Used for data sources like Datastore and BigQuery. For BigQuery: Required to filter out rows based on the given start and end times. If not specified and the table was modified between the given start and end times, the entire table will be scanned. The valid data types of the timestamp field are: INTEGER, DATE, TIMESTAMP, or DATETIME BigQuery column. For Datastore. Valid data types of the timestamp field are: TIMESTAMP. Datastore entity will be scanned if the timestamp property does not exist or its value is empty or invalid.
    Name string
    Specification of the field containing the timestamp of scanned items. Used for data sources like Datastore and BigQuery. For BigQuery: Required to filter out rows based on the given start and end times. If not specified and the table was modified between the given start and end times, the entire table will be scanned. The valid data types of the timestamp field are: INTEGER, DATE, TIMESTAMP, or DATETIME BigQuery column. For Datastore. Valid data types of the timestamp field are: TIMESTAMP. Datastore entity will be scanned if the timestamp property does not exist or its value is empty or invalid.
    name String
    Specification of the field containing the timestamp of scanned items. Used for data sources like Datastore and BigQuery. For BigQuery: Required to filter out rows based on the given start and end times. If not specified and the table was modified between the given start and end times, the entire table will be scanned. The valid data types of the timestamp field are: INTEGER, DATE, TIMESTAMP, or DATETIME BigQuery column. For Datastore. Valid data types of the timestamp field are: TIMESTAMP. Datastore entity will be scanned if the timestamp property does not exist or its value is empty or invalid.
    name string
    Specification of the field containing the timestamp of scanned items. Used for data sources like Datastore and BigQuery. For BigQuery: Required to filter out rows based on the given start and end times. If not specified and the table was modified between the given start and end times, the entire table will be scanned. The valid data types of the timestamp field are: INTEGER, DATE, TIMESTAMP, or DATETIME BigQuery column. For Datastore. Valid data types of the timestamp field are: TIMESTAMP. Datastore entity will be scanned if the timestamp property does not exist or its value is empty or invalid.
    name str
    Specification of the field containing the timestamp of scanned items. Used for data sources like Datastore and BigQuery. For BigQuery: Required to filter out rows based on the given start and end times. If not specified and the table was modified between the given start and end times, the entire table will be scanned. The valid data types of the timestamp field are: INTEGER, DATE, TIMESTAMP, or DATETIME BigQuery column. For Datastore. Valid data types of the timestamp field are: TIMESTAMP. Datastore entity will be scanned if the timestamp property does not exist or its value is empty or invalid.
    name String
    Specification of the field containing the timestamp of scanned items. Used for data sources like Datastore and BigQuery. For BigQuery: Required to filter out rows based on the given start and end times. If not specified and the table was modified between the given start and end times, the entire table will be scanned. The valid data types of the timestamp field are: INTEGER, DATE, TIMESTAMP, or DATETIME BigQuery column. For Datastore. Valid data types of the timestamp field are: TIMESTAMP. Datastore entity will be scanned if the timestamp property does not exist or its value is empty or invalid.

    PreventionJobTriggerTrigger, PreventionJobTriggerTriggerArgs

    Manual PreventionJobTriggerTriggerManual
    For use with hybrid jobs. Jobs must be manually created and finished.
    Schedule PreventionJobTriggerTriggerSchedule
    Schedule for triggered jobs. Structure is documented below.
    Manual PreventionJobTriggerTriggerManual
    For use with hybrid jobs. Jobs must be manually created and finished.
    Schedule PreventionJobTriggerTriggerSchedule
    Schedule for triggered jobs. Structure is documented below.
    manual PreventionJobTriggerTriggerManual
    For use with hybrid jobs. Jobs must be manually created and finished.
    schedule PreventionJobTriggerTriggerSchedule
    Schedule for triggered jobs. Structure is documented below.
    manual PreventionJobTriggerTriggerManual
    For use with hybrid jobs. Jobs must be manually created and finished.
    schedule PreventionJobTriggerTriggerSchedule
    Schedule for triggered jobs. Structure is documented below.
    manual PreventionJobTriggerTriggerManual
    For use with hybrid jobs. Jobs must be manually created and finished.
    schedule PreventionJobTriggerTriggerSchedule
    Schedule for triggered jobs. Structure is documented below.
    manual Property Map
    For use with hybrid jobs. Jobs must be manually created and finished.
    schedule Property Map
    Schedule for triggered jobs. Structure is documented below.

    PreventionJobTriggerTriggerSchedule, PreventionJobTriggerTriggerScheduleArgs

    RecurrencePeriodDuration string
    With this option a job is started on a regular periodic basis. For example: every day (86400 seconds). A scheduled start time will be skipped if the previous execution has not ended when its scheduled time occurs. This value must be set to a time duration greater than or equal to 1 day and can be no longer than 60 days. A duration in seconds with up to nine fractional digits, terminated by 's'. Example: "3.5s".


    RecurrencePeriodDuration string
    With this option a job is started on a regular periodic basis. For example: every day (86400 seconds). A scheduled start time will be skipped if the previous execution has not ended when its scheduled time occurs. This value must be set to a time duration greater than or equal to 1 day and can be no longer than 60 days. A duration in seconds with up to nine fractional digits, terminated by 's'. Example: "3.5s".


    recurrencePeriodDuration String
    With this option a job is started on a regular periodic basis. For example: every day (86400 seconds). A scheduled start time will be skipped if the previous execution has not ended when its scheduled time occurs. This value must be set to a time duration greater than or equal to 1 day and can be no longer than 60 days. A duration in seconds with up to nine fractional digits, terminated by 's'. Example: "3.5s".


    recurrencePeriodDuration string
    With this option a job is started on a regular periodic basis. For example: every day (86400 seconds). A scheduled start time will be skipped if the previous execution has not ended when its scheduled time occurs. This value must be set to a time duration greater than or equal to 1 day and can be no longer than 60 days. A duration in seconds with up to nine fractional digits, terminated by 's'. Example: "3.5s".


    recurrence_period_duration str
    With this option a job is started on a regular periodic basis. For example: every day (86400 seconds). A scheduled start time will be skipped if the previous execution has not ended when its scheduled time occurs. This value must be set to a time duration greater than or equal to 1 day and can be no longer than 60 days. A duration in seconds with up to nine fractional digits, terminated by 's'. Example: "3.5s".


    recurrencePeriodDuration String
    With this option a job is started on a regular periodic basis. For example: every day (86400 seconds). A scheduled start time will be skipped if the previous execution has not ended when its scheduled time occurs. This value must be set to a time duration greater than or equal to 1 day and can be no longer than 60 days. A duration in seconds with up to nine fractional digits, terminated by 's'. Example: "3.5s".


    Import

    JobTrigger can be imported using any of these accepted formats:

    • {{parent}}/jobTriggers/{{name}}

    • {{parent}}/{{name}}

    When using the pulumi import command, JobTrigger can be imported using one of the formats above. For example:

    $ pulumi import gcp:dataloss/preventionJobTrigger:PreventionJobTrigger default {{parent}}/jobTriggers/{{name}}
    
    $ pulumi import gcp:dataloss/preventionJobTrigger:PreventionJobTrigger default {{parent}}/{{name}}
    

    Package Details

    Repository
    Google Cloud (GCP) Classic pulumi/pulumi-gcp
    License
    Apache-2.0
    Notes
    This Pulumi package is based on the google-beta Terraform Provider.
    gcp logo
    Google Cloud Classic v7.16.0 published on Wednesday, Mar 27, 2024 by Pulumi