aws logo
AWS Classic v5.41.0, May 15, 2023

aws.sagemaker.DataQualityJobDefinition

Explore with Pulumi AI

Provides a SageMaker data quality job definition resource.

Example Usage

Basic usage

using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Aws = Pulumi.Aws;

// Example: create a SageMaker data quality job definition that monitors an
// existing endpoint and writes monitoring output to an S3 bucket.
// NOTE(review): `data`, `aws_sagemaker_endpoint`, `aws_s3_bucket`, and
// `aws_iam_role` are documentation placeholders for resources defined
// elsewhere — replace them with real references in your program.
return await Deployment.RunAsync(() => 
{
    var test = new Aws.Sagemaker.DataQualityJobDefinition("test", new()
    {
        // Container that runs the monitoring job (a prebuilt SageMaker monitor image).
        DataQualityAppSpecification = new Aws.Sagemaker.Inputs.DataQualityJobDefinitionDataQualityAppSpecificationArgs
        {
            ImageUri = data.Aws_sagemaker_prebuilt_ecr_image.Monitor.Registry_path,
        },
        // Input for the monitoring job: the SageMaker endpoint to monitor.
        DataQualityJobInput = new Aws.Sagemaker.Inputs.DataQualityJobDefinitionDataQualityJobInputArgs
        {
            EndpointInput = new Aws.Sagemaker.Inputs.DataQualityJobDefinitionDataQualityJobInputEndpointInputArgs
            {
                EndpointName = aws_sagemaker_endpoint.My_endpoint.Name,
            },
        },
        // Where monitoring results are written (an S3 URI).
        DataQualityJobOutputConfig = new Aws.Sagemaker.Inputs.DataQualityJobDefinitionDataQualityJobOutputConfigArgs
        {
            MonitoringOutputs = new Aws.Sagemaker.Inputs.DataQualityJobDefinitionDataQualityJobOutputConfigMonitoringOutputsArgs
            {
                S3Output = new Aws.Sagemaker.Inputs.DataQualityJobDefinitionDataQualityJobOutputConfigMonitoringOutputsS3OutputArgs
                {
                    S3Uri = $"https://{aws_s3_bucket.My_bucket.Bucket_regional_domain_name}/output",
                },
            },
        },
        // Compute resources for the monitoring job's processing cluster.
        JobResources = new Aws.Sagemaker.Inputs.DataQualityJobDefinitionJobResourcesArgs
        {
            ClusterConfig = new Aws.Sagemaker.Inputs.DataQualityJobDefinitionJobResourcesClusterConfigArgs
            {
                InstanceCount = 1,
                InstanceType = "ml.t3.medium",
                VolumeSizeInGb = 20,
            },
        },
        // IAM role SageMaker assumes to run the monitoring job.
        RoleArn = aws_iam_role.My_role.Arn,
    });

});
package main

import (
	"fmt"

	"github.com/pulumi/pulumi-aws/sdk/v5/go/aws/sagemaker"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := sagemaker.NewDataQualityJobDefinition(ctx, "test", &sagemaker.DataQualityJobDefinitionArgs{
			DataQualityAppSpecification: &sagemaker.DataQualityJobDefinitionDataQualityAppSpecificationArgs{
				ImageUri: pulumi.Any(data.Aws_sagemaker_prebuilt_ecr_image.Monitor.Registry_path),
			},
			DataQualityJobInput: &sagemaker.DataQualityJobDefinitionDataQualityJobInputArgs{
				EndpointInput: &sagemaker.DataQualityJobDefinitionDataQualityJobInputEndpointInputArgs{
					EndpointName: pulumi.Any(aws_sagemaker_endpoint.My_endpoint.Name),
				},
			},
			DataQualityJobOutputConfig: &sagemaker.DataQualityJobDefinitionDataQualityJobOutputConfigArgs{
				MonitoringOutputs: &sagemaker.DataQualityJobDefinitionDataQualityJobOutputConfigMonitoringOutputsArgs{
					S3Output: sagemaker.DataQualityJobDefinitionDataQualityJobOutputConfigMonitoringOutputsS3OutputArgs{
						S3Uri: pulumi.String(fmt.Sprintf("https://%v/output", aws_s3_bucket.My_bucket.Bucket_regional_domain_name)),
					},
				},
			},
			JobResources: &sagemaker.DataQualityJobDefinitionJobResourcesArgs{
				ClusterConfig: &sagemaker.DataQualityJobDefinitionJobResourcesClusterConfigArgs{
					InstanceCount:  pulumi.Int(1),
					InstanceType:   pulumi.String("ml.t3.medium"),
					VolumeSizeInGb: pulumi.Int(20),
				},
			},
			RoleArn: pulumi.Any(aws_iam_role.My_role.Arn),
		})
		if err != nil {
			return err
		}
		return nil
	})
}
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.aws.sagemaker.DataQualityJobDefinition;
import com.pulumi.aws.sagemaker.DataQualityJobDefinitionArgs;
import com.pulumi.aws.sagemaker.inputs.DataQualityJobDefinitionDataQualityAppSpecificationArgs;
import com.pulumi.aws.sagemaker.inputs.DataQualityJobDefinitionDataQualityJobInputArgs;
import com.pulumi.aws.sagemaker.inputs.DataQualityJobDefinitionDataQualityJobInputEndpointInputArgs;
import com.pulumi.aws.sagemaker.inputs.DataQualityJobDefinitionDataQualityJobOutputConfigArgs;
import com.pulumi.aws.sagemaker.inputs.DataQualityJobDefinitionDataQualityJobOutputConfigMonitoringOutputsArgs;
import com.pulumi.aws.sagemaker.inputs.DataQualityJobDefinitionDataQualityJobOutputConfigMonitoringOutputsS3OutputArgs;
import com.pulumi.aws.sagemaker.inputs.DataQualityJobDefinitionJobResourcesArgs;
import com.pulumi.aws.sagemaker.inputs.DataQualityJobDefinitionJobResourcesClusterConfigArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

/**
 * Example: create a SageMaker data quality job definition that monitors an
 * existing endpoint and writes monitoring output to an S3 bucket.
 *
 * NOTE(review): {@code data}, {@code aws_sagemaker_endpoint},
 * {@code aws_s3_bucket}, and {@code aws_iam_role} are documentation
 * placeholders for resources defined elsewhere — replace them with real
 * references in your program.
 */
public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var test = new DataQualityJobDefinition("test", DataQualityJobDefinitionArgs.builder()        
            // Container that runs the monitoring job (a prebuilt SageMaker monitor image).
            .dataQualityAppSpecification(DataQualityJobDefinitionDataQualityAppSpecificationArgs.builder()
                .imageUri(data.aws_sagemaker_prebuilt_ecr_image().monitor().registry_path())
                .build())
            // Input for the monitoring job: the SageMaker endpoint to monitor.
            .dataQualityJobInput(DataQualityJobDefinitionDataQualityJobInputArgs.builder()
                .endpointInput(DataQualityJobDefinitionDataQualityJobInputEndpointInputArgs.builder()
                    .endpointName(aws_sagemaker_endpoint.my_endpoint().name())
                    .build())
                .build())
            // Where monitoring results are written (an S3 URI).
            .dataQualityJobOutputConfig(DataQualityJobDefinitionDataQualityJobOutputConfigArgs.builder()
                .monitoringOutputs(DataQualityJobDefinitionDataQualityJobOutputConfigMonitoringOutputsArgs.builder()
                    .s3Output(DataQualityJobDefinitionDataQualityJobOutputConfigMonitoringOutputsS3OutputArgs.builder()
                        .s3Uri(String.format("https://%s/output", aws_s3_bucket.my_bucket().bucket_regional_domain_name()))
                        .build())
                    .build())
                .build())
            // Compute resources for the monitoring job's processing cluster.
            .jobResources(DataQualityJobDefinitionJobResourcesArgs.builder()
                .clusterConfig(DataQualityJobDefinitionJobResourcesClusterConfigArgs.builder()
                    .instanceCount(1)
                    .instanceType("ml.t3.medium")
                    .volumeSizeInGb(20)
                    .build())
                .build())
            // IAM role SageMaker assumes to run the monitoring job.
            .roleArn(aws_iam_role.my_role().arn())
            .build());

    }
}
import pulumi
import pulumi_aws as aws

# Example: create a SageMaker data quality job definition that monitors an
# existing endpoint and writes monitoring output to an S3 bucket.
# NOTE(review): `data`, `aws_sagemaker_endpoint`, `aws_s3_bucket`, and
# `aws_iam_role` are documentation placeholders for resources defined
# elsewhere — replace them with real references in your program.
test = aws.sagemaker.DataQualityJobDefinition("test",
    # Container that runs the monitoring job (a prebuilt SageMaker monitor image).
    data_quality_app_specification=aws.sagemaker.DataQualityJobDefinitionDataQualityAppSpecificationArgs(
        image_uri=data["aws_sagemaker_prebuilt_ecr_image"]["monitor"]["registry_path"],
    ),
    # Input for the monitoring job: the SageMaker endpoint to monitor.
    data_quality_job_input=aws.sagemaker.DataQualityJobDefinitionDataQualityJobInputArgs(
        endpoint_input=aws.sagemaker.DataQualityJobDefinitionDataQualityJobInputEndpointInputArgs(
            endpoint_name=aws_sagemaker_endpoint["my_endpoint"]["name"],
        ),
    ),
    # Where monitoring results are written (an S3 URI).
    data_quality_job_output_config=aws.sagemaker.DataQualityJobDefinitionDataQualityJobOutputConfigArgs(
        monitoring_outputs=aws.sagemaker.DataQualityJobDefinitionDataQualityJobOutputConfigMonitoringOutputsArgs(
            s3_output=aws.sagemaker.DataQualityJobDefinitionDataQualityJobOutputConfigMonitoringOutputsS3OutputArgs(
                s3_uri=f"https://{aws_s3_bucket['my_bucket']['bucket_regional_domain_name']}/output",
            ),
        ),
    ),
    # Compute resources for the monitoring job's processing cluster.
    job_resources=aws.sagemaker.DataQualityJobDefinitionJobResourcesArgs(
        cluster_config=aws.sagemaker.DataQualityJobDefinitionJobResourcesClusterConfigArgs(
            instance_count=1,
            instance_type="ml.t3.medium",
            volume_size_in_gb=20,
        ),
    ),
    # IAM role SageMaker assumes to run the monitoring job.
    role_arn=aws_iam_role["my_role"]["arn"])
import * as pulumi from "@pulumi/pulumi";
import * as aws from "@pulumi/aws";

// Example: create a SageMaker data quality job definition that monitors an
// existing endpoint and writes monitoring output to an S3 bucket.
// NOTE(review): `data`, `aws_sagemaker_endpoint`, `aws_s3_bucket`, and
// `aws_iam_role` are documentation placeholders for resources defined
// elsewhere — replace them with real references in your program.
const test = new aws.sagemaker.DataQualityJobDefinition("test", {
    // Container that runs the monitoring job (a prebuilt SageMaker monitor image).
    dataQualityAppSpecification: {
        imageUri: data.aws_sagemaker_prebuilt_ecr_image.monitor.registry_path,
    },
    // Input for the monitoring job: the SageMaker endpoint to monitor.
    dataQualityJobInput: {
        endpointInput: {
            endpointName: aws_sagemaker_endpoint.my_endpoint.name,
        },
    },
    // Where monitoring results are written (an S3 URI).
    dataQualityJobOutputConfig: {
        monitoringOutputs: {
            s3Output: {
                s3Uri: `https://${aws_s3_bucket.my_bucket.bucket_regional_domain_name}/output`,
            },
        },
    },
    // Compute resources for the monitoring job's processing cluster.
    jobResources: {
        clusterConfig: {
            instanceCount: 1,
            instanceType: "ml.t3.medium",
            volumeSizeInGb: 20,
        },
    },
    // IAM role SageMaker assumes to run the monitoring job.
    roleArn: aws_iam_role.my_role.arn,
});
# Example: create a SageMaker data quality job definition that monitors an
# existing endpoint and writes monitoring output to an S3 bucket.
# NOTE(review): the ${...} interpolations reference resources defined
# elsewhere — replace them with real references in your program.
resources:
  test:
    type: aws:sagemaker:DataQualityJobDefinition
    properties:
      # Container that runs the monitoring job (a prebuilt SageMaker monitor image).
      dataQualityAppSpecification:
        imageUri: ${data.aws_sagemaker_prebuilt_ecr_image.monitor.registry_path}
      # Input for the monitoring job: the SageMaker endpoint to monitor.
      dataQualityJobInput:
        endpointInput:
          endpointName: ${aws_sagemaker_endpoint.my_endpoint.name}
      # Where monitoring results are written (an S3 URI).
      dataQualityJobOutputConfig:
        monitoringOutputs:
          s3Output:
            s3Uri: https://${aws_s3_bucket.my_bucket.bucket_regional_domain_name}/output
      # Compute resources for the monitoring job's processing cluster.
      jobResources:
        clusterConfig:
          instanceCount: 1
          instanceType: ml.t3.medium
          volumeSizeInGb: 20
      # IAM role SageMaker assumes to run the monitoring job.
      roleArn: ${aws_iam_role.my_role.arn}

Create DataQualityJobDefinition Resource

new DataQualityJobDefinition(name: string, args: DataQualityJobDefinitionArgs, opts?: CustomResourceOptions);
@overload
def DataQualityJobDefinition(resource_name: str,
                             opts: Optional[ResourceOptions] = None,
                             data_quality_app_specification: Optional[DataQualityJobDefinitionDataQualityAppSpecificationArgs] = None,
                             data_quality_baseline_config: Optional[DataQualityJobDefinitionDataQualityBaselineConfigArgs] = None,
                             data_quality_job_input: Optional[DataQualityJobDefinitionDataQualityJobInputArgs] = None,
                             data_quality_job_output_config: Optional[DataQualityJobDefinitionDataQualityJobOutputConfigArgs] = None,
                             job_resources: Optional[DataQualityJobDefinitionJobResourcesArgs] = None,
                             name: Optional[str] = None,
                             network_config: Optional[DataQualityJobDefinitionNetworkConfigArgs] = None,
                             role_arn: Optional[str] = None,
                             stopping_condition: Optional[DataQualityJobDefinitionStoppingConditionArgs] = None,
                             tags: Optional[Mapping[str, str]] = None)
@overload
def DataQualityJobDefinition(resource_name: str,
                             args: DataQualityJobDefinitionArgs,
                             opts: Optional[ResourceOptions] = None)
func NewDataQualityJobDefinition(ctx *Context, name string, args DataQualityJobDefinitionArgs, opts ...ResourceOption) (*DataQualityJobDefinition, error)
public DataQualityJobDefinition(string name, DataQualityJobDefinitionArgs args, CustomResourceOptions? opts = null)
public DataQualityJobDefinition(String name, DataQualityJobDefinitionArgs args)
public DataQualityJobDefinition(String name, DataQualityJobDefinitionArgs args, CustomResourceOptions options)
type: aws:sagemaker:DataQualityJobDefinition
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.

name string
The unique name of the resource.
args DataQualityJobDefinitionArgs
The arguments to resource properties.
opts CustomResourceOptions
Bag of options to control resource's behavior.
resource_name str
The unique name of the resource.
args DataQualityJobDefinitionArgs
The arguments to resource properties.
opts ResourceOptions
Bag of options to control resource's behavior.
ctx Context
Context object for the current deployment.
name string
The unique name of the resource.
args DataQualityJobDefinitionArgs
The arguments to resource properties.
opts ResourceOption
Bag of options to control resource's behavior.
name string
The unique name of the resource.
args DataQualityJobDefinitionArgs
The arguments to resource properties.
opts CustomResourceOptions
Bag of options to control resource's behavior.
name String
The unique name of the resource.
args DataQualityJobDefinitionArgs
The arguments to resource properties.
options CustomResourceOptions
Bag of options to control resource's behavior.

DataQualityJobDefinition Resource Properties

To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

Inputs

The DataQualityJobDefinition resource accepts the following input properties:

DataQualityAppSpecification Pulumi.Aws.Sagemaker.Inputs.DataQualityJobDefinitionDataQualityAppSpecificationArgs

Specifies the container that runs the monitoring job. Fields are documented below.

DataQualityJobInput Pulumi.Aws.Sagemaker.Inputs.DataQualityJobDefinitionDataQualityJobInputArgs

A list of inputs for the monitoring job. Fields are documented below.

DataQualityJobOutputConfig Pulumi.Aws.Sagemaker.Inputs.DataQualityJobDefinitionDataQualityJobOutputConfigArgs

The output configuration for monitoring jobs. Fields are documented below.

JobResources Pulumi.Aws.Sagemaker.Inputs.DataQualityJobDefinitionJobResourcesArgs

Identifies the resources to deploy for a monitoring job. Fields are documented below.

RoleArn string

The Amazon Resource Name (ARN) of an IAM role that Amazon SageMaker can assume to perform tasks on your behalf.

DataQualityBaselineConfig Pulumi.Aws.Sagemaker.Inputs.DataQualityJobDefinitionDataQualityBaselineConfigArgs

Configures the constraints and baselines for the monitoring job. Fields are documented below.

Name string

The name of the data quality job definition. If omitted, the provider will assign a random, unique name.

NetworkConfig Pulumi.Aws.Sagemaker.Inputs.DataQualityJobDefinitionNetworkConfigArgs

Specifies networking configuration for the monitoring job. Fields are documented below.

StoppingCondition Pulumi.Aws.Sagemaker.Inputs.DataQualityJobDefinitionStoppingConditionArgs

A time limit for how long the monitoring job is allowed to run before stopping. Fields are documented below.

Tags Dictionary<string, string>

A mapping of tags to assign to the resource. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.

DataQualityAppSpecification DataQualityJobDefinitionDataQualityAppSpecificationArgs

Specifies the container that runs the monitoring job. Fields are documented below.

DataQualityJobInput DataQualityJobDefinitionDataQualityJobInputArgs

A list of inputs for the monitoring job. Fields are documented below.

DataQualityJobOutputConfig DataQualityJobDefinitionDataQualityJobOutputConfigArgs

The output configuration for monitoring jobs. Fields are documented below.

JobResources DataQualityJobDefinitionJobResourcesArgs

Identifies the resources to deploy for a monitoring job. Fields are documented below.

RoleArn string

The Amazon Resource Name (ARN) of an IAM role that Amazon SageMaker can assume to perform tasks on your behalf.

DataQualityBaselineConfig DataQualityJobDefinitionDataQualityBaselineConfigArgs

Configures the constraints and baselines for the monitoring job. Fields are documented below.

Name string

The name of the data quality job definition. If omitted, the provider will assign a random, unique name.

NetworkConfig DataQualityJobDefinitionNetworkConfigArgs

Specifies networking configuration for the monitoring job. Fields are documented below.

StoppingCondition DataQualityJobDefinitionStoppingConditionArgs

A time limit for how long the monitoring job is allowed to run before stopping. Fields are documented below.

Tags map[string]string

A mapping of tags to assign to the resource. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.

dataQualityAppSpecification DataQualityJobDefinitionDataQualityAppSpecificationArgs

Specifies the container that runs the monitoring job. Fields are documented below.

dataQualityJobInput DataQualityJobDefinitionDataQualityJobInputArgs

A list of inputs for the monitoring job. Fields are documented below.

dataQualityJobOutputConfig DataQualityJobDefinitionDataQualityJobOutputConfigArgs

The output configuration for monitoring jobs. Fields are documented below.

jobResources DataQualityJobDefinitionJobResourcesArgs

Identifies the resources to deploy for a monitoring job. Fields are documented below.

roleArn String

The Amazon Resource Name (ARN) of an IAM role that Amazon SageMaker can assume to perform tasks on your behalf.

dataQualityBaselineConfig DataQualityJobDefinitionDataQualityBaselineConfigArgs

Configures the constraints and baselines for the monitoring job. Fields are documented below.

name String

The name of the data quality job definition. If omitted, the provider will assign a random, unique name.

networkConfig DataQualityJobDefinitionNetworkConfigArgs

Specifies networking configuration for the monitoring job. Fields are documented below.

stoppingCondition DataQualityJobDefinitionStoppingConditionArgs

A time limit for how long the monitoring job is allowed to run before stopping. Fields are documented below.

tags Map<String,String>

A mapping of tags to assign to the resource. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.

dataQualityAppSpecification DataQualityJobDefinitionDataQualityAppSpecificationArgs

Specifies the container that runs the monitoring job. Fields are documented below.

dataQualityJobInput DataQualityJobDefinitionDataQualityJobInputArgs

A list of inputs for the monitoring job. Fields are documented below.

dataQualityJobOutputConfig DataQualityJobDefinitionDataQualityJobOutputConfigArgs

The output configuration for monitoring jobs. Fields are documented below.

jobResources DataQualityJobDefinitionJobResourcesArgs

Identifies the resources to deploy for a monitoring job. Fields are documented below.

roleArn string

The Amazon Resource Name (ARN) of an IAM role that Amazon SageMaker can assume to perform tasks on your behalf.

dataQualityBaselineConfig DataQualityJobDefinitionDataQualityBaselineConfigArgs

Configures the constraints and baselines for the monitoring job. Fields are documented below.

name string

The name of the data quality job definition. If omitted, the provider will assign a random, unique name.

networkConfig DataQualityJobDefinitionNetworkConfigArgs

Specifies networking configuration for the monitoring job. Fields are documented below.

stoppingCondition DataQualityJobDefinitionStoppingConditionArgs

A time limit for how long the monitoring job is allowed to run before stopping. Fields are documented below.

tags {[key: string]: string}

A mapping of tags to assign to the resource. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.

data_quality_app_specification DataQualityJobDefinitionDataQualityAppSpecificationArgs

Specifies the container that runs the monitoring job. Fields are documented below.

data_quality_job_input DataQualityJobDefinitionDataQualityJobInputArgs

A list of inputs for the monitoring job. Fields are documented below.

data_quality_job_output_config DataQualityJobDefinitionDataQualityJobOutputConfigArgs

The output configuration for monitoring jobs. Fields are documented below.

job_resources DataQualityJobDefinitionJobResourcesArgs

Identifies the resources to deploy for a monitoring job. Fields are documented below.

role_arn str

The Amazon Resource Name (ARN) of an IAM role that Amazon SageMaker can assume to perform tasks on your behalf.

data_quality_baseline_config DataQualityJobDefinitionDataQualityBaselineConfigArgs

Configures the constraints and baselines for the monitoring job. Fields are documented below.

name str

The name of the data quality job definition. If omitted, the provider will assign a random, unique name.

network_config DataQualityJobDefinitionNetworkConfigArgs

Specifies networking configuration for the monitoring job. Fields are documented below.

stopping_condition DataQualityJobDefinitionStoppingConditionArgs

A time limit for how long the monitoring job is allowed to run before stopping. Fields are documented below.

tags Mapping[str, str]

A mapping of tags to assign to the resource. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.

dataQualityAppSpecification Property Map

Specifies the container that runs the monitoring job. Fields are documented below.

dataQualityJobInput Property Map

A list of inputs for the monitoring job. Fields are documented below.

dataQualityJobOutputConfig Property Map

The output configuration for monitoring jobs. Fields are documented below.

jobResources Property Map

Identifies the resources to deploy for a monitoring job. Fields are documented below.

roleArn String

The Amazon Resource Name (ARN) of an IAM role that Amazon SageMaker can assume to perform tasks on your behalf.

dataQualityBaselineConfig Property Map

Configures the constraints and baselines for the monitoring job. Fields are documented below.

name String

The name of the data quality job definition. If omitted, the provider will assign a random, unique name.

networkConfig Property Map

Specifies networking configuration for the monitoring job. Fields are documented below.

stoppingCondition Property Map

A time limit for how long the monitoring job is allowed to run before stopping. Fields are documented below.

tags Map<String>

A mapping of tags to assign to the resource. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.

Outputs

All input properties are implicitly available as output properties. Additionally, the DataQualityJobDefinition resource produces the following output properties:

Arn string

The Amazon Resource Name (ARN) assigned by AWS to this data quality job definition.

Id string

The provider-assigned unique ID for this managed resource.

TagsAll Dictionary<string, string>

A map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.

Arn string

The Amazon Resource Name (ARN) assigned by AWS to this data quality job definition.

Id string

The provider-assigned unique ID for this managed resource.

TagsAll map[string]string

A map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.

arn String

The Amazon Resource Name (ARN) assigned by AWS to this data quality job definition.

id String

The provider-assigned unique ID for this managed resource.

tagsAll Map<String,String>

A map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.

arn string

The Amazon Resource Name (ARN) assigned by AWS to this data quality job definition.

id string

The provider-assigned unique ID for this managed resource.

tagsAll {[key: string]: string}

A map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.

arn str

The Amazon Resource Name (ARN) assigned by AWS to this data quality job definition.

id str

The provider-assigned unique ID for this managed resource.

tags_all Mapping[str, str]

A map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.

arn String

The Amazon Resource Name (ARN) assigned by AWS to this data quality job definition.

id String

The provider-assigned unique ID for this managed resource.

tagsAll Map<String>

A map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.

Look up Existing DataQualityJobDefinition Resource

Get an existing DataQualityJobDefinition resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

public static get(name: string, id: Input<ID>, state?: DataQualityJobDefinitionState, opts?: CustomResourceOptions): DataQualityJobDefinition
@staticmethod
def get(resource_name: str,
        id: str,
        opts: Optional[ResourceOptions] = None,
        arn: Optional[str] = None,
        data_quality_app_specification: Optional[DataQualityJobDefinitionDataQualityAppSpecificationArgs] = None,
        data_quality_baseline_config: Optional[DataQualityJobDefinitionDataQualityBaselineConfigArgs] = None,
        data_quality_job_input: Optional[DataQualityJobDefinitionDataQualityJobInputArgs] = None,
        data_quality_job_output_config: Optional[DataQualityJobDefinitionDataQualityJobOutputConfigArgs] = None,
        job_resources: Optional[DataQualityJobDefinitionJobResourcesArgs] = None,
        name: Optional[str] = None,
        network_config: Optional[DataQualityJobDefinitionNetworkConfigArgs] = None,
        role_arn: Optional[str] = None,
        stopping_condition: Optional[DataQualityJobDefinitionStoppingConditionArgs] = None,
        tags: Optional[Mapping[str, str]] = None,
        tags_all: Optional[Mapping[str, str]] = None) -> DataQualityJobDefinition
func GetDataQualityJobDefinition(ctx *Context, name string, id IDInput, state *DataQualityJobDefinitionState, opts ...ResourceOption) (*DataQualityJobDefinition, error)
public static DataQualityJobDefinition Get(string name, Input<string> id, DataQualityJobDefinitionState? state, CustomResourceOptions? opts = null)
public static DataQualityJobDefinition get(String name, Output<String> id, DataQualityJobDefinitionState state, CustomResourceOptions options)
Resource lookup is not supported in YAML
name
The unique name of the resulting resource.
id
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
resource_name
The unique name of the resulting resource.
id
The unique provider ID of the resource to lookup.
name
The unique name of the resulting resource.
id
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
name
The unique name of the resulting resource.
id
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
name
The unique name of the resulting resource.
id
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
The following state arguments are supported:
Arn string

The Amazon Resource Name (ARN) assigned by AWS to this data quality job definition.

DataQualityAppSpecification Pulumi.Aws.Sagemaker.Inputs.DataQualityJobDefinitionDataQualityAppSpecificationArgs

Specifies the container that runs the monitoring job. Fields are documented below.

DataQualityBaselineConfig Pulumi.Aws.Sagemaker.Inputs.DataQualityJobDefinitionDataQualityBaselineConfigArgs

Configures the constraints and baselines for the monitoring job. Fields are documented below.

DataQualityJobInput Pulumi.Aws.Sagemaker.Inputs.DataQualityJobDefinitionDataQualityJobInputArgs

A list of inputs for the monitoring job. Fields are documented below.

DataQualityJobOutputConfig Pulumi.Aws.Sagemaker.Inputs.DataQualityJobDefinitionDataQualityJobOutputConfigArgs

The output configuration for monitoring jobs. Fields are documented below.

JobResources Pulumi.Aws.Sagemaker.Inputs.DataQualityJobDefinitionJobResourcesArgs

Identifies the resources to deploy for a monitoring job. Fields are documented below.

Name string

The name of the data quality job definition. If omitted, the provider will assign a random, unique name.

NetworkConfig Pulumi.Aws.Sagemaker.Inputs.DataQualityJobDefinitionNetworkConfigArgs

Specifies networking configuration for the monitoring job. Fields are documented below.

RoleArn string

The Amazon Resource Name (ARN) of an IAM role that Amazon SageMaker can assume to perform tasks on your behalf.

StoppingCondition Pulumi.Aws.Sagemaker.Inputs.DataQualityJobDefinitionStoppingConditionArgs

A time limit for how long the monitoring job is allowed to run before stopping. Fields are documented below.

Tags Dictionary<string, string>

A mapping of tags to assign to the resource. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.

TagsAll Dictionary<string, string>

A map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.

Arn string

The Amazon Resource Name (ARN) assigned by AWS to this data quality job definition.

DataQualityAppSpecification DataQualityJobDefinitionDataQualityAppSpecificationArgs

Specifies the container that runs the monitoring job. Fields are documented below.

DataQualityBaselineConfig DataQualityJobDefinitionDataQualityBaselineConfigArgs

Configures the constraints and baselines for the monitoring job. Fields are documented below.

DataQualityJobInput DataQualityJobDefinitionDataQualityJobInputArgs

A list of inputs for the monitoring job. Fields are documented below.

DataQualityJobOutputConfig DataQualityJobDefinitionDataQualityJobOutputConfigArgs

The output configuration for monitoring jobs. Fields are documented below.

JobResources DataQualityJobDefinitionJobResourcesArgs

Identifies the resources to deploy for a monitoring job. Fields are documented below.

Name string

The name of the data quality job definition. If omitted, the provider will assign a random, unique name.

NetworkConfig DataQualityJobDefinitionNetworkConfigArgs

Specifies networking configuration for the monitoring job. Fields are documented below.

RoleArn string

The Amazon Resource Name (ARN) of an IAM role that Amazon SageMaker can assume to perform tasks on your behalf.

StoppingCondition DataQualityJobDefinitionStoppingConditionArgs

A time limit for how long the monitoring job is allowed to run before stopping. Fields are documented below.

Tags map[string]string

A mapping of tags to assign to the resource. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.

TagsAll map[string]string

A map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.

arn String

The Amazon Resource Name (ARN) assigned by AWS to this data quality job definition.

dataQualityAppSpecification DataQualityJobDefinitionDataQualityAppSpecificationArgs

Specifies the container that runs the monitoring job. Fields are documented below.

dataQualityBaselineConfig DataQualityJobDefinitionDataQualityBaselineConfigArgs

Configures the constraints and baselines for the monitoring job. Fields are documented below.

dataQualityJobInput DataQualityJobDefinitionDataQualityJobInputArgs

A list of inputs for the monitoring job. Fields are documented below.

dataQualityJobOutputConfig DataQualityJobDefinitionDataQualityJobOutputConfigArgs

The output configuration for monitoring jobs. Fields are documented below.

jobResources DataQualityJobDefinitionJobResourcesArgs

Identifies the resources to deploy for a monitoring job. Fields are documented below.

name String

The name of the data quality job definition. If omitted, the provider will assign a random, unique name.

networkConfig DataQualityJobDefinitionNetworkConfigArgs

Specifies networking configuration for the monitoring job. Fields are documented below.

roleArn String

The Amazon Resource Name (ARN) of an IAM role that Amazon SageMaker can assume to perform tasks on your behalf.

stoppingCondition DataQualityJobDefinitionStoppingConditionArgs

A time limit for how long the monitoring job is allowed to run before stopping. Fields are documented below.

tags Map<String,String>

A mapping of tags to assign to the resource. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.

tagsAll Map<String,String>

A map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.

arn string

The Amazon Resource Name (ARN) assigned by AWS to this data quality job definition.

dataQualityAppSpecification DataQualityJobDefinitionDataQualityAppSpecificationArgs

Specifies the container that runs the monitoring job. Fields are documented below.

dataQualityBaselineConfig DataQualityJobDefinitionDataQualityBaselineConfigArgs

Configures the constraints and baselines for the monitoring job. Fields are documented below.

dataQualityJobInput DataQualityJobDefinitionDataQualityJobInputArgs

A list of inputs for the monitoring job. Fields are documented below.

dataQualityJobOutputConfig DataQualityJobDefinitionDataQualityJobOutputConfigArgs

The output configuration for monitoring jobs. Fields are documented below.

jobResources DataQualityJobDefinitionJobResourcesArgs

Identifies the resources to deploy for a monitoring job. Fields are documented below.

name string

The name of the data quality job definition. If omitted, the provider will assign a random, unique name.

networkConfig DataQualityJobDefinitionNetworkConfigArgs

Specifies networking configuration for the monitoring job. Fields are documented below.

roleArn string

The Amazon Resource Name (ARN) of an IAM role that Amazon SageMaker can assume to perform tasks on your behalf.

stoppingCondition DataQualityJobDefinitionStoppingConditionArgs

A time limit for how long the monitoring job is allowed to run before stopping. Fields are documented below.

tags {[key: string]: string}

A mapping of tags to assign to the resource. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.

tagsAll {[key: string]: string}

A map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.

arn str

The Amazon Resource Name (ARN) assigned by AWS to this data quality job definition.

data_quality_app_specification DataQualityJobDefinitionDataQualityAppSpecificationArgs

Specifies the container that runs the monitoring job. Fields are documented below.

data_quality_baseline_config DataQualityJobDefinitionDataQualityBaselineConfigArgs

Configures the constraints and baselines for the monitoring job. Fields are documented below.

data_quality_job_input DataQualityJobDefinitionDataQualityJobInputArgs

A list of inputs for the monitoring job. Fields are documented below.

data_quality_job_output_config DataQualityJobDefinitionDataQualityJobOutputConfigArgs

The output configuration for monitoring jobs. Fields are documented below.

job_resources DataQualityJobDefinitionJobResourcesArgs

Identifies the resources to deploy for a monitoring job. Fields are documented below.

name str

The name of the data quality job definition. If omitted, the provider will assign a random, unique name.

network_config DataQualityJobDefinitionNetworkConfigArgs

Specifies networking configuration for the monitoring job. Fields are documented below.

role_arn str

The Amazon Resource Name (ARN) of an IAM role that Amazon SageMaker can assume to perform tasks on your behalf.

stopping_condition DataQualityJobDefinitionStoppingConditionArgs

A time limit for how long the monitoring job is allowed to run before stopping. Fields are documented below.

tags Mapping[str, str]

A mapping of tags to assign to the resource. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.

tags_all Mapping[str, str]

A map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.

arn String

The Amazon Resource Name (ARN) assigned by AWS to this data quality job definition.

dataQualityAppSpecification Property Map

Specifies the container that runs the monitoring job. Fields are documented below.

dataQualityBaselineConfig Property Map

Configures the constraints and baselines for the monitoring job. Fields are documented below.

dataQualityJobInput Property Map

A list of inputs for the monitoring job. Fields are documented below.

dataQualityJobOutputConfig Property Map

The output configuration for monitoring jobs. Fields are documented below.

jobResources Property Map

Identifies the resources to deploy for a monitoring job. Fields are documented below.

name String

The name of the data quality job definition. If omitted, the provider will assign a random, unique name.

networkConfig Property Map

Specifies networking configuration for the monitoring job. Fields are documented below.

roleArn String

The Amazon Resource Name (ARN) of an IAM role that Amazon SageMaker can assume to perform tasks on your behalf.

stoppingCondition Property Map

A time limit for how long the monitoring job is allowed to run before stopping. Fields are documented below.

tags Map<String>

A mapping of tags to assign to the resource. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.

tagsAll Map<String>

A map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.

Supporting Types

DataQualityJobDefinitionDataQualityAppSpecification

ImageUri string

The container image that the data quality monitoring job runs.

Environment Dictionary<string, string>

Sets the environment variables in the container that the monitoring job runs. A list of key value pairs.

PostAnalyticsProcessorSourceUri string

An Amazon S3 URI to a script that is called after analysis has been performed. Applicable only for the built-in (first party) containers.

RecordPreprocessorSourceUri string

An Amazon S3 URI to a script that is called per row prior to running analysis. It can base64 decode the payload and convert it into a flattened JSON so that the built-in container can use the converted data. Applicable only for the built-in (first party) containers.

ImageUri string

The container image that the data quality monitoring job runs.

Environment map[string]string

Sets the environment variables in the container that the monitoring job runs. A list of key value pairs.

PostAnalyticsProcessorSourceUri string

An Amazon S3 URI to a script that is called after analysis has been performed. Applicable only for the built-in (first party) containers.

RecordPreprocessorSourceUri string

An Amazon S3 URI to a script that is called per row prior to running analysis. It can base64 decode the payload and convert it into a flattened JSON so that the built-in container can use the converted data. Applicable only for the built-in (first party) containers.

imageUri String

The container image that the data quality monitoring job runs.

environment Map<String,String>

Sets the environment variables in the container that the monitoring job runs. A list of key value pairs.

postAnalyticsProcessorSourceUri String

An Amazon S3 URI to a script that is called after analysis has been performed. Applicable only for the built-in (first party) containers.

recordPreprocessorSourceUri String

An Amazon S3 URI to a script that is called per row prior to running analysis. It can base64 decode the payload and convert it into a flattened JSON so that the built-in container can use the converted data. Applicable only for the built-in (first party) containers.

imageUri string

The container image that the data quality monitoring job runs.

environment {[key: string]: string}

Sets the environment variables in the container that the monitoring job runs. A list of key value pairs.

postAnalyticsProcessorSourceUri string

An Amazon S3 URI to a script that is called after analysis has been performed. Applicable only for the built-in (first party) containers.

recordPreprocessorSourceUri string

An Amazon S3 URI to a script that is called per row prior to running analysis. It can base64 decode the payload and convert it into a flattened JSON so that the built-in container can use the converted data. Applicable only for the built-in (first party) containers.

image_uri str

The container image that the data quality monitoring job runs.

environment Mapping[str, str]

Sets the environment variables in the container that the monitoring job runs. A list of key value pairs.

post_analytics_processor_source_uri str

An Amazon S3 URI to a script that is called after analysis has been performed. Applicable only for the built-in (first party) containers.

record_preprocessor_source_uri str

An Amazon S3 URI to a script that is called per row prior to running analysis. It can base64 decode the payload and convert it into a flattened JSON so that the built-in container can use the converted data. Applicable only for the built-in (first party) containers.

imageUri String

The container image that the data quality monitoring job runs.

environment Map<String>

Sets the environment variables in the container that the monitoring job runs. A list of key value pairs.

postAnalyticsProcessorSourceUri String

An Amazon S3 URI to a script that is called after analysis has been performed. Applicable only for the built-in (first party) containers.

recordPreprocessorSourceUri String

An Amazon S3 URI to a script that is called per row prior to running analysis. It can base64 decode the payload and convert it into a flattened JSON so that the built-in container can use the converted data. Applicable only for the built-in (first party) containers.

DataQualityJobDefinitionDataQualityBaselineConfig

ConstraintsResource DataQualityJobDefinitionDataQualityBaselineConfigConstraintsResource

The constraints resource for a monitoring job. Fields are documented below.

StatisticsResource DataQualityJobDefinitionDataQualityBaselineConfigStatisticsResource

The statistics resource for a monitoring job. Fields are documented below.

constraintsResource DataQualityJobDefinitionDataQualityBaselineConfigConstraintsResource

The constraints resource for a monitoring job. Fields are documented below.

statisticsResource DataQualityJobDefinitionDataQualityBaselineConfigStatisticsResource

The statistics resource for a monitoring job. Fields are documented below.

constraintsResource DataQualityJobDefinitionDataQualityBaselineConfigConstraintsResource

The constraints resource for a monitoring job. Fields are documented below.

statisticsResource DataQualityJobDefinitionDataQualityBaselineConfigStatisticsResource

The statistics resource for a monitoring job. Fields are documented below.

constraints_resource DataQualityJobDefinitionDataQualityBaselineConfigConstraintsResource

The constraints resource for a monitoring job. Fields are documented below.

statistics_resource DataQualityJobDefinitionDataQualityBaselineConfigStatisticsResource

The statistics resource for a monitoring job. Fields are documented below.

constraintsResource Property Map

The constraints resource for a monitoring job. Fields are documented below.

statisticsResource Property Map

The statistics resource for a monitoring job. Fields are documented below.

DataQualityJobDefinitionDataQualityBaselineConfigConstraintsResource

S3Uri string

The Amazon S3 URI for the constraints resource.

S3Uri string

The Amazon S3 URI for the constraints resource.

s3Uri String

The Amazon S3 URI for the constraints resource.

s3Uri string

The Amazon S3 URI for the constraints resource.

s3_uri str

The Amazon S3 URI for the constraints resource.

s3Uri String

The Amazon S3 URI for the constraints resource.

DataQualityJobDefinitionDataQualityBaselineConfigStatisticsResource

S3Uri string

The Amazon S3 URI for the statistics resource.

S3Uri string

The Amazon S3 URI for the statistics resource.

s3Uri String

The Amazon S3 URI for the statistics resource.

s3Uri string

The Amazon S3 URI for the statistics resource.

s3_uri str

The Amazon S3 URI for the statistics resource.

s3Uri String

The Amazon S3 URI for the statistics resource.

DataQualityJobDefinitionDataQualityJobInput

BatchTransformInput DataQualityJobDefinitionDataQualityJobInputBatchTransformInput

Input object for the batch transform job. Fields are documented below.

EndpointInput DataQualityJobDefinitionDataQualityJobInputEndpointInput

Input object for the endpoint. Fields are documented below.

batchTransformInput DataQualityJobDefinitionDataQualityJobInputBatchTransformInput

Input object for the batch transform job. Fields are documented below.

endpointInput DataQualityJobDefinitionDataQualityJobInputEndpointInput

Input object for the endpoint. Fields are documented below.

batchTransformInput DataQualityJobDefinitionDataQualityJobInputBatchTransformInput

Input object for the batch transform job. Fields are documented below.

endpointInput DataQualityJobDefinitionDataQualityJobInputEndpointInput

Input object for the endpoint. Fields are documented below.

batch_transform_input DataQualityJobDefinitionDataQualityJobInputBatchTransformInput

Input object for the batch transform job. Fields are documented below.

endpoint_input DataQualityJobDefinitionDataQualityJobInputEndpointInput

Input object for the endpoint. Fields are documented below.

batchTransformInput Property Map

Input object for the batch transform job. Fields are documented below.

endpointInput Property Map

Input object for the endpoint. Fields are documented below.

DataQualityJobDefinitionDataQualityJobInputBatchTransformInput

DataCapturedDestinationS3Uri string

The Amazon S3 location being used to capture the data.

DatasetFormat Pulumi.Aws.Sagemaker.Inputs.DataQualityJobDefinitionDataQualityJobInputBatchTransformInputDatasetFormat

The dataset format for your batch transform job. Fields are documented below.

LocalPath string

Path to the filesystem where the batch transform data is available to the container. Defaults to /opt/ml/processing/input.

S3DataDistributionType string

Whether input data distributed in Amazon S3 is fully replicated or sharded by an S3 key. Defaults to FullyReplicated. Valid values are FullyReplicated or ShardedByS3Key

S3InputMode string

Whether the Pipe or File is used as the input mode for transferring data for the monitoring job. Pipe mode is recommended for large datasets. File mode is useful for small files that fit in memory. Defaults to File. Valid values are Pipe or File

DataCapturedDestinationS3Uri string

The Amazon S3 location being used to capture the data.

DatasetFormat DataQualityJobDefinitionDataQualityJobInputBatchTransformInputDatasetFormat

The dataset format for your batch transform job. Fields are documented below.

LocalPath string

Path to the filesystem where the batch transform data is available to the container. Defaults to /opt/ml/processing/input.

S3DataDistributionType string

Whether input data distributed in Amazon S3 is fully replicated or sharded by an S3 key. Defaults to FullyReplicated. Valid values are FullyReplicated or ShardedByS3Key

S3InputMode string

Whether the Pipe or File is used as the input mode for transferring data for the monitoring job. Pipe mode is recommended for large datasets. File mode is useful for small files that fit in memory. Defaults to File. Valid values are Pipe or File

dataCapturedDestinationS3Uri String

The Amazon S3 location being used to capture the data.

datasetFormat DataQualityJobDefinitionDataQualityJobInputBatchTransformInputDatasetFormat

The dataset format for your batch transform job. Fields are documented below.

localPath String

Path to the filesystem where the batch transform data is available to the container. Defaults to /opt/ml/processing/input.

s3DataDistributionType String

Whether input data distributed in Amazon S3 is fully replicated or sharded by an S3 key. Defaults to FullyReplicated. Valid values are FullyReplicated or ShardedByS3Key

s3InputMode String

Whether the Pipe or File is used as the input mode for transferring data for the monitoring job. Pipe mode is recommended for large datasets. File mode is useful for small files that fit in memory. Defaults to File. Valid values are Pipe or File

dataCapturedDestinationS3Uri string

The Amazon S3 location being used to capture the data.

datasetFormat DataQualityJobDefinitionDataQualityJobInputBatchTransformInputDatasetFormat

The dataset format for your batch transform job. Fields are documented below.

localPath string

Path to the filesystem where the batch transform data is available to the container. Defaults to /opt/ml/processing/input.

s3DataDistributionType string

Whether input data distributed in Amazon S3 is fully replicated or sharded by an S3 key. Defaults to FullyReplicated. Valid values are FullyReplicated or ShardedByS3Key

s3InputMode string

Whether the Pipe or File is used as the input mode for transferring data for the monitoring job. Pipe mode is recommended for large datasets. File mode is useful for small files that fit in memory. Defaults to File. Valid values are Pipe or File

data_captured_destination_s3_uri str

The Amazon S3 location being used to capture the data.

dataset_format DataQualityJobDefinitionDataQualityJobInputBatchTransformInputDatasetFormat

The dataset format for your batch transform job. Fields are documented below.

local_path str

Path to the filesystem where the batch transform data is available to the container. Defaults to /opt/ml/processing/input.

s3_data_distribution_type str

Whether input data distributed in Amazon S3 is fully replicated or sharded by an S3 key. Defaults to FullyReplicated. Valid values are FullyReplicated or ShardedByS3Key

s3_input_mode str

Whether the Pipe or File is used as the input mode for transferring data for the monitoring job. Pipe mode is recommended for large datasets. File mode is useful for small files that fit in memory. Defaults to File. Valid values are Pipe or File

dataCapturedDestinationS3Uri String

The Amazon S3 location being used to capture the data.

datasetFormat Property Map

The dataset format for your batch transform job. Fields are documented below.

localPath String

Path to the filesystem where the batch transform data is available to the container. Defaults to /opt/ml/processing/input.

s3DataDistributionType String

Whether input data distributed in Amazon S3 is fully replicated or sharded by an S3 key. Defaults to FullyReplicated. Valid values are FullyReplicated or ShardedByS3Key

s3InputMode String

Whether the Pipe or File is used as the input mode for transferring data for the monitoring job. Pipe mode is recommended for large datasets. File mode is useful for small files that fit in memory. Defaults to File. Valid values are Pipe or File

DataQualityJobDefinitionDataQualityJobInputBatchTransformInputDatasetFormat

Csv Pulumi.Aws.Sagemaker.Inputs.DataQualityJobDefinitionDataQualityJobInputBatchTransformInputDatasetFormatCsv

The CSV dataset used in the monitoring job. Fields are documented below.

Json Pulumi.Aws.Sagemaker.Inputs.DataQualityJobDefinitionDataQualityJobInputBatchTransformInputDatasetFormatJson

The JSON dataset used in the monitoring job. Fields are documented below.

Csv DataQualityJobDefinitionDataQualityJobInputBatchTransformInputDatasetFormatCsv

The CSV dataset used in the monitoring job. Fields are documented below.

Json DataQualityJobDefinitionDataQualityJobInputBatchTransformInputDatasetFormatJson

The JSON dataset used in the monitoring job. Fields are documented below.

csv DataQualityJobDefinitionDataQualityJobInputBatchTransformInputDatasetFormatCsv

The CSV dataset used in the monitoring job. Fields are documented below.

json DataQualityJobDefinitionDataQualityJobInputBatchTransformInputDatasetFormatJson

The JSON dataset used in the monitoring job. Fields are documented below.

csv DataQualityJobDefinitionDataQualityJobInputBatchTransformInputDatasetFormatCsv

The CSV dataset used in the monitoring job. Fields are documented below.

json DataQualityJobDefinitionDataQualityJobInputBatchTransformInputDatasetFormatJson

The JSON dataset used in the monitoring job. Fields are documented below.

csv DataQualityJobDefinitionDataQualityJobInputBatchTransformInputDatasetFormatCsv

The CSV dataset used in the monitoring job. Fields are documented below.

json DataQualityJobDefinitionDataQualityJobInputBatchTransformInputDatasetFormatJson

The JSON dataset used in the monitoring job. Fields are documented below.

csv Property Map

The CSV dataset used in the monitoring job. Fields are documented below.

json Property Map

The JSON dataset used in the monitoring job. Fields are documented below.

DataQualityJobDefinitionDataQualityJobInputBatchTransformInputDatasetFormatCsv

Header bool

Indicates if the CSV data has a header.

Header bool

Indicates if the CSV data has a header.

header Boolean

Indicates if the CSV data has a header.

header boolean

Indicates if the CSV data has a header.

header bool

Indicates if the CSV data has a header.

header Boolean

Indicates if the CSV data has a header.

DataQualityJobDefinitionDataQualityJobInputBatchTransformInputDatasetFormatJson

Line bool

Indicates if the file should be read as a JSON object per line.

Line bool

Indicates if the file should be read as a JSON object per line.

line Boolean

Indicates if the file should be read as a JSON object per line.

line boolean

Indicates if the file should be read as a JSON object per line.

line bool

Indicates if the file should be read as a JSON object per line.

line Boolean

Indicates if the file should be read as a JSON object per line.

DataQualityJobDefinitionDataQualityJobInputEndpointInput

EndpointName string

An endpoint in the customer's account which has data_capture_config enabled.

LocalPath string

Path to the filesystem where the endpoint data is available to the container. Defaults to /opt/ml/processing/input.

S3DataDistributionType string

Whether input data distributed in Amazon S3 is fully replicated or sharded by an S3 key. Defaults to FullyReplicated. Valid values are FullyReplicated or ShardedByS3Key

S3InputMode string

Whether the Pipe or File is used as the input mode for transferring data for the monitoring job. Pipe mode is recommended for large datasets. File mode is useful for small files that fit in memory. Defaults to File. Valid values are Pipe or File

EndpointName string

An endpoint in the customer's account which has data_capture_config enabled.

LocalPath string

Path to the filesystem where the endpoint data is available to the container. Defaults to /opt/ml/processing/input.

S3DataDistributionType string

Whether input data distributed in Amazon S3 is fully replicated or sharded by an S3 key. Defaults to FullyReplicated. Valid values are FullyReplicated or ShardedByS3Key

S3InputMode string

Whether the Pipe or File is used as the input mode for transferring data for the monitoring job. Pipe mode is recommended for large datasets. File mode is useful for small files that fit in memory. Defaults to File. Valid values are Pipe or File

endpointName String

An endpoint in the customer's account which has data_capture_config enabled.

localPath String

Path to the filesystem where the endpoint data is available to the container. Defaults to /opt/ml/processing/input.

s3DataDistributionType String

Whether input data distributed in Amazon S3 is fully replicated or sharded by an S3 key. Defaults to FullyReplicated. Valid values are FullyReplicated or ShardedByS3Key

s3InputMode String

Whether the Pipe or File is used as the input mode for transferring data for the monitoring job. Pipe mode is recommended for large datasets. File mode is useful for small files that fit in memory. Defaults to File. Valid values are Pipe or File

endpointName string

An endpoint in the customer's account which has data_capture_config enabled.

localPath string

Path to the filesystem where the endpoint data is available to the container. Defaults to /opt/ml/processing/input.

s3DataDistributionType string

Whether input data distributed in Amazon S3 is fully replicated or sharded by an S3 key. Defaults to FullyReplicated. Valid values are FullyReplicated or ShardedByS3Key

s3InputMode string

Whether the Pipe or File is used as the input mode for transferring data for the monitoring job. Pipe mode is recommended for large datasets. File mode is useful for small files that fit in memory. Defaults to File. Valid values are Pipe or File

endpoint_name str

An endpoint in the customer's account which has data_capture_config enabled.

local_path str

Path to the filesystem where the endpoint data is available to the container. Defaults to /opt/ml/processing/input.

s3_data_distribution_type str

Whether input data distributed in Amazon S3 is fully replicated or sharded by an S3 key. Defaults to FullyReplicated. Valid values are FullyReplicated or ShardedByS3Key

s3_input_mode str

Whether the Pipe or File is used as the input mode for transferring data for the monitoring job. Pipe mode is recommended for large datasets. File mode is useful for small files that fit in memory. Defaults to File. Valid values are Pipe or File

endpointName String

An endpoint in the customer's account which has data_capture_config enabled.

localPath String

Path to the filesystem where the endpoint data is available to the container. Defaults to /opt/ml/processing/input.

s3DataDistributionType String

Whether input data distributed in Amazon S3 is fully replicated or sharded by an S3 key. Defaults to FullyReplicated. Valid values are FullyReplicated or ShardedByS3Key

s3InputMode String

Whether the Pipe or File is used as the input mode for transferring data for the monitoring job. Pipe mode is recommended for large datasets. File mode is useful for small files that fit in memory. Defaults to File. Valid values are Pipe or File

DataQualityJobDefinitionDataQualityJobOutputConfig

MonitoringOutputs Pulumi.Aws.Sagemaker.Inputs.DataQualityJobDefinitionDataQualityJobOutputConfigMonitoringOutputs

Monitoring outputs for monitoring jobs. This is where the output of the periodic monitoring jobs is uploaded. Fields are documented below.

KmsKeyId string

The AWS Key Management Service (AWS KMS) key that Amazon SageMaker uses to encrypt the model artifacts at rest using Amazon S3 server-side encryption.

MonitoringOutputs DataQualityJobDefinitionDataQualityJobOutputConfigMonitoringOutputs

Monitoring outputs for monitoring jobs. This is where the output of the periodic monitoring jobs is uploaded. Fields are documented below.

KmsKeyId string

The AWS Key Management Service (AWS KMS) key that Amazon SageMaker uses to encrypt the model artifacts at rest using Amazon S3 server-side encryption.

monitoringOutputs DataQualityJobDefinitionDataQualityJobOutputConfigMonitoringOutputs

Monitoring outputs for monitoring jobs. This is where the output of the periodic monitoring jobs is uploaded. Fields are documented below.

kmsKeyId String

The AWS Key Management Service (AWS KMS) key that Amazon SageMaker uses to encrypt the model artifacts at rest using Amazon S3 server-side encryption.

monitoringOutputs DataQualityJobDefinitionDataQualityJobOutputConfigMonitoringOutputs

Monitoring outputs for monitoring jobs. This is where the output of the periodic monitoring jobs is uploaded. Fields are documented below.

kmsKeyId string

The AWS Key Management Service (AWS KMS) key that Amazon SageMaker uses to encrypt the model artifacts at rest using Amazon S3 server-side encryption.

monitoring_outputs DataQualityJobDefinitionDataQualityJobOutputConfigMonitoringOutputs

Monitoring outputs for monitoring jobs. This is where the output of the periodic monitoring jobs is uploaded. Fields are documented below.

kms_key_id str

The AWS Key Management Service (AWS KMS) key that Amazon SageMaker uses to encrypt the model artifacts at rest using Amazon S3 server-side encryption.

monitoringOutputs Property Map

Monitoring outputs for monitoring jobs. This is where the output of the periodic monitoring jobs is uploaded. Fields are documented below.

kmsKeyId String

The AWS Key Management Service (AWS KMS) key that Amazon SageMaker uses to encrypt the model artifacts at rest using Amazon S3 server-side encryption.

DataQualityJobDefinitionDataQualityJobOutputConfigMonitoringOutputs

S3Output Pulumi.Aws.Sagemaker.Inputs.DataQualityJobDefinitionDataQualityJobOutputConfigMonitoringOutputsS3Output

The Amazon S3 storage location where the results of a monitoring job are saved. Fields are documented below.

S3Output DataQualityJobDefinitionDataQualityJobOutputConfigMonitoringOutputsS3Output

The Amazon S3 storage location where the results of a monitoring job are saved. Fields are documented below.

s3Output DataQualityJobDefinitionDataQualityJobOutputConfigMonitoringOutputsS3Output

The Amazon S3 storage location where the results of a monitoring job are saved. Fields are documented below.

s3Output DataQualityJobDefinitionDataQualityJobOutputConfigMonitoringOutputsS3Output

The Amazon S3 storage location where the results of a monitoring job are saved. Fields are documented below.

s3_output DataQualityJobDefinitionDataQualityJobOutputConfigMonitoringOutputsS3Output

The Amazon S3 storage location where the results of a monitoring job are saved. Fields are documented below.

s3Output Property Map

The Amazon S3 storage location where the results of a monitoring job are saved. Fields are documented below.

DataQualityJobDefinitionDataQualityJobOutputConfigMonitoringOutputsS3Output

S3Uri string

The Amazon S3 URI where the results of the monitoring job are saved.

LocalPath string

Path to the filesystem where the batch transform data is available to the container. Defaults to /opt/ml/processing/input.

S3UploadMode string

Whether to upload the results of the monitoring job continuously or after the job completes. Valid values are Continuous or EndOfJob

S3Uri string

The Amazon S3 URI where the results of the monitoring job are saved.

LocalPath string

Path to the filesystem where the batch transform data is available to the container. Defaults to /opt/ml/processing/input.

S3UploadMode string

Whether to upload the results of the monitoring job continuously or after the job completes. Valid values are Continuous or EndOfJob

s3Uri String

The Amazon S3 URI where the results of the monitoring job are saved.

localPath String

Path to the filesystem where the batch transform data is available to the container. Defaults to /opt/ml/processing/input.

s3UploadMode String

Whether to upload the results of the monitoring job continuously or after the job completes. Valid values are Continuous or EndOfJob

s3Uri string

The Amazon S3 URI where the results of the monitoring job are saved.

localPath string

Path to the filesystem where the batch transform data is available to the container. Defaults to /opt/ml/processing/input.

s3UploadMode string

Whether to upload the results of the monitoring job continuously or after the job completes. Valid values are Continuous or EndOfJob

s3_uri str

The Amazon S3 URI where the results of the monitoring job are saved.

local_path str

Path to the filesystem where the batch transform data is available to the container. Defaults to /opt/ml/processing/input.

s3_upload_mode str

Whether to upload the results of the monitoring job continuously or after the job completes. Valid values are Continuous or EndOfJob

s3Uri String

The Amazon S3 URI where the results of the monitoring job are saved.

localPath String

Path to the filesystem where the batch transform data is available to the container. Defaults to /opt/ml/processing/input.

s3UploadMode String

Whether to upload the results of the monitoring job continuously or after the job completes. Valid values are Continuous or EndOfJob

DataQualityJobDefinitionJobResources

ClusterConfig Pulumi.Aws.Sagemaker.Inputs.DataQualityJobDefinitionJobResourcesClusterConfig

The configuration for the cluster resources used to run the processing job. Fields are documented below.

ClusterConfig DataQualityJobDefinitionJobResourcesClusterConfig

The configuration for the cluster resources used to run the processing job. Fields are documented below.

clusterConfig DataQualityJobDefinitionJobResourcesClusterConfig

The configuration for the cluster resources used to run the processing job. Fields are documented below.

clusterConfig DataQualityJobDefinitionJobResourcesClusterConfig

The configuration for the cluster resources used to run the processing job. Fields are documented below.

cluster_config DataQualityJobDefinitionJobResourcesClusterConfig

The configuration for the cluster resources used to run the processing job. Fields are documented below.

clusterConfig Property Map

The configuration for the cluster resources used to run the processing job. Fields are documented below.

DataQualityJobDefinitionJobResourcesClusterConfig

InstanceCount int

The number of ML compute instances to use in the model monitoring job. For distributed processing jobs, specify a value greater than 1.

InstanceType string

The ML compute instance type for the processing job.

VolumeSizeInGb int

The size of the ML storage volume, in gigabytes, that you want to provision. You must specify sufficient ML storage for your scenario.

VolumeKmsKeyId string

The AWS Key Management Service (AWS KMS) key that Amazon SageMaker uses to encrypt data on the storage volume attached to the ML compute instance(s) that run the model monitoring job.

InstanceCount int

The number of ML compute instances to use in the model monitoring job. For distributed processing jobs, specify a value greater than 1.

InstanceType string

The ML compute instance type for the processing job.

VolumeSizeInGb int

The size of the ML storage volume, in gigabytes, that you want to provision. You must specify sufficient ML storage for your scenario.

VolumeKmsKeyId string

The AWS Key Management Service (AWS KMS) key that Amazon SageMaker uses to encrypt data on the storage volume attached to the ML compute instance(s) that run the model monitoring job.

instanceCount Integer

The number of ML compute instances to use in the model monitoring job. For distributed processing jobs, specify a value greater than 1.

instanceType String

The ML compute instance type for the processing job.

volumeSizeInGb Integer

The size of the ML storage volume, in gigabytes, that you want to provision. You must specify sufficient ML storage for your scenario.

volumeKmsKeyId String

The AWS Key Management Service (AWS KMS) key that Amazon SageMaker uses to encrypt data on the storage volume attached to the ML compute instance(s) that run the model monitoring job.

instanceCount number

The number of ML compute instances to use in the model monitoring job. For distributed processing jobs, specify a value greater than 1.

instanceType string

The ML compute instance type for the processing job.

volumeSizeInGb number

The size of the ML storage volume, in gigabytes, that you want to provision. You must specify sufficient ML storage for your scenario.

volumeKmsKeyId string

The AWS Key Management Service (AWS KMS) key that Amazon SageMaker uses to encrypt data on the storage volume attached to the ML compute instance(s) that run the model monitoring job.

instance_count int

The number of ML compute instances to use in the model monitoring job. For distributed processing jobs, specify a value greater than 1.

instance_type str

The ML compute instance type for the processing job.

volume_size_in_gb int

The size of the ML storage volume, in gigabytes, that you want to provision. You must specify sufficient ML storage for your scenario.

volume_kms_key_id str

The AWS Key Management Service (AWS KMS) key that Amazon SageMaker uses to encrypt data on the storage volume attached to the ML compute instance(s) that run the model monitoring job.

instanceCount Number

The number of ML compute instances to use in the model monitoring job. For distributed processing jobs, specify a value greater than 1.

instanceType String

The ML compute instance type for the processing job.

volumeSizeInGb Number

The size of the ML storage volume, in gigabytes, that you want to provision. You must specify sufficient ML storage for your scenario.

volumeKmsKeyId String

The AWS Key Management Service (AWS KMS) key that Amazon SageMaker uses to encrypt data on the storage volume attached to the ML compute instance(s) that run the model monitoring job.

DataQualityJobDefinitionNetworkConfig

EnableInterContainerTrafficEncryption bool

Whether to encrypt all communications between the instances used for the monitoring jobs. Choose true to encrypt communications. Encryption provides greater security for distributed jobs, but the processing might take longer.

EnableNetworkIsolation bool

Whether to allow inbound and outbound network calls to and from the containers used for the monitoring job.

VpcConfig Pulumi.Aws.Sagemaker.Inputs.DataQualityJobDefinitionNetworkConfigVpcConfig

Specifies a VPC that your training jobs and hosted models have access to. Control access to and from your training and model containers by configuring the VPC. Fields are documented below.

EnableInterContainerTrafficEncryption bool

Whether to encrypt all communications between the instances used for the monitoring jobs. Choose true to encrypt communications. Encryption provides greater security for distributed jobs, but the processing might take longer.

EnableNetworkIsolation bool

Whether to allow inbound and outbound network calls to and from the containers used for the monitoring job.

VpcConfig DataQualityJobDefinitionNetworkConfigVpcConfig

Specifies a VPC that your training jobs and hosted models have access to. Control access to and from your training and model containers by configuring the VPC. Fields are documented below.

enableInterContainerTrafficEncryption Boolean

Whether to encrypt all communications between the instances used for the monitoring jobs. Choose true to encrypt communications. Encryption provides greater security for distributed jobs, but the processing might take longer.

enableNetworkIsolation Boolean

Whether to allow inbound and outbound network calls to and from the containers used for the monitoring job.

vpcConfig DataQualityJobDefinitionNetworkConfigVpcConfig

Specifies a VPC that your training jobs and hosted models have access to. Control access to and from your training and model containers by configuring the VPC. Fields are documented below.

enableInterContainerTrafficEncryption boolean

Whether to encrypt all communications between the instances used for the monitoring jobs. Choose true to encrypt communications. Encryption provides greater security for distributed jobs, but the processing might take longer.

enableNetworkIsolation boolean

Whether to allow inbound and outbound network calls to and from the containers used for the monitoring job.

vpcConfig DataQualityJobDefinitionNetworkConfigVpcConfig

Specifies a VPC that your training jobs and hosted models have access to. Control access to and from your training and model containers by configuring the VPC. Fields are documented below.

enable_inter_container_traffic_encryption bool

Whether to encrypt all communications between the instances used for the monitoring jobs. Choose true to encrypt communications. Encryption provides greater security for distributed jobs, but the processing might take longer.

enable_network_isolation bool

Whether to allow inbound and outbound network calls to and from the containers used for the monitoring job.

vpc_config DataQualityJobDefinitionNetworkConfigVpcConfig

Specifies a VPC that your training jobs and hosted models have access to. Control access to and from your training and model containers by configuring the VPC. Fields are documented below.

enableInterContainerTrafficEncryption Boolean

Whether to encrypt all communications between the instances used for the monitoring jobs. Choose true to encrypt communications. Encryption provides greater security for distributed jobs, but the processing might take longer.

enableNetworkIsolation Boolean

Whether to allow inbound and outbound network calls to and from the containers used for the monitoring job.

vpcConfig Property Map

Specifies a VPC that your training jobs and hosted models have access to. Control access to and from your training and model containers by configuring the VPC. Fields are documented below.

DataQualityJobDefinitionNetworkConfigVpcConfig

SecurityGroupIds List<string>

The VPC security group IDs, in the form sg-xxxxxxxx. Specify the security groups for the VPC that is specified in the subnets field.

Subnets List<string>

The IDs of the subnets in the VPC to which you want to connect your training job or model.

SecurityGroupIds []string

The VPC security group IDs, in the form sg-xxxxxxxx. Specify the security groups for the VPC that is specified in the subnets field.

Subnets []string

The IDs of the subnets in the VPC to which you want to connect your training job or model.

securityGroupIds List<String>

The VPC security group IDs, in the form sg-xxxxxxxx. Specify the security groups for the VPC that is specified in the subnets field.

subnets List<String>

The IDs of the subnets in the VPC to which you want to connect your training job or model.

securityGroupIds string[]

The VPC security group IDs, in the form sg-xxxxxxxx. Specify the security groups for the VPC that is specified in the subnets field.

subnets string[]

The IDs of the subnets in the VPC to which you want to connect your training job or model.

security_group_ids Sequence[str]

The VPC security group IDs, in the form sg-xxxxxxxx. Specify the security groups for the VPC that is specified in the subnets field.

subnets Sequence[str]

The IDs of the subnets in the VPC to which you want to connect your training job or model.

securityGroupIds List<String>

The VPC security group IDs, in the form sg-xxxxxxxx. Specify the security groups for the VPC that is specified in the subnets field.

subnets List<String>

The IDs of the subnets in the VPC to which you want to connect your training job or model.

DataQualityJobDefinitionStoppingCondition

MaxRuntimeInSeconds int

The maximum runtime allowed in seconds.

MaxRuntimeInSeconds int

The maximum runtime allowed in seconds.

maxRuntimeInSeconds Integer

The maximum runtime allowed in seconds.

maxRuntimeInSeconds number

The maximum runtime allowed in seconds.

max_runtime_in_seconds int

The maximum runtime allowed in seconds.

maxRuntimeInSeconds Number

The maximum runtime allowed in seconds.

Import

Data quality job definitions can be imported using the name, e.g.,

 $ pulumi import aws:sagemaker/dataQualityJobDefinition:DataQualityJobDefinition test_data_quality_job_definition data-quality-job-definition-foo

Package Details

Repository
AWS Classic pulumi/pulumi-aws
License
Apache-2.0
Notes

This Pulumi package is based on the aws Terraform Provider.