published on Tuesday, Mar 31, 2026 by Pulumi
Manages an AWS SageMaker AI Algorithm.
Example Usage
Basic Usage
import * as pulumi from "@pulumi/pulumi";
import * as aws from "@pulumi/aws";

// Minimal SageMaker Algorithm: a hard-coded training image plus one
// required input channel, tagged for the test environment.
const example = new aws.sagemaker.Algorithm("example", {
    algorithmName: "example-algorithm",
    trainingSpecification: {
        supportedTrainingInstanceTypes: ["ml.m5.large"],
        trainingImage: "123456789012.dkr.ecr.us-west-2.amazonaws.com/example-training:latest",
        // Every algorithm must declare at least one training channel.
        trainingChannels: [{
            name: "train",
            supportedContentTypes: ["text/csv"],
            supportedInputModes: ["File"],
        }],
    },
    tags: {
        Environment: "test",
    },
});
import pulumi
import pulumi_aws as aws

# Minimal SageMaker Algorithm: a hard-coded training image plus one
# required input channel, tagged for the test environment.
example = aws.sagemaker.Algorithm("example",
    algorithm_name="example-algorithm",
    training_specification={
        "supported_training_instance_types": ["ml.m5.large"],
        "training_image": "123456789012.dkr.ecr.us-west-2.amazonaws.com/example-training:latest",
        # Every algorithm must declare at least one training channel.
        "training_channels": [{
            "name": "train",
            "supported_content_types": ["text/csv"],
            "supported_input_modes": ["File"],
        }],
    },
    tags={
        "Environment": "test",
    })
package main

import (
	"github.com/pulumi/pulumi-aws/sdk/v7/go/aws/sagemaker"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		// Minimal SageMaker Algorithm: a hard-coded training image plus one
		// required input channel, tagged for the test environment.
		_, err := sagemaker.NewAlgorithm(ctx, "example", &sagemaker.AlgorithmArgs{
			AlgorithmName: pulumi.String("example-algorithm"),
			TrainingSpecification: &sagemaker.AlgorithmTrainingSpecificationArgs{
				SupportedTrainingInstanceTypes: pulumi.StringArray{
					pulumi.String("ml.m5.large"),
				},
				TrainingImage: pulumi.String("123456789012.dkr.ecr.us-west-2.amazonaws.com/example-training:latest"),
				// Every algorithm must declare at least one training channel.
				TrainingChannels: sagemaker.AlgorithmTrainingSpecificationTrainingChannelArray{
					&sagemaker.AlgorithmTrainingSpecificationTrainingChannelArgs{
						Name: pulumi.String("train"),
						SupportedContentTypes: pulumi.StringArray{
							pulumi.String("text/csv"),
						},
						SupportedInputModes: pulumi.StringArray{
							pulumi.String("File"),
						},
					},
				},
			},
			Tags: pulumi.StringMap{
				"Environment": pulumi.String("test"),
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Aws = Pulumi.Aws;

return await Deployment.RunAsync(() =>
{
    // Minimal SageMaker Algorithm: a hard-coded training image plus one
    // required input channel, tagged for the test environment.
    var example = new Aws.Sagemaker.Algorithm("example", new()
    {
        AlgorithmName = "example-algorithm",
        TrainingSpecification = new Aws.Sagemaker.Inputs.AlgorithmTrainingSpecificationArgs
        {
            SupportedTrainingInstanceTypes = new[]
            {
                "ml.m5.large",
            },
            TrainingImage = "123456789012.dkr.ecr.us-west-2.amazonaws.com/example-training:latest",
            // Every algorithm must declare at least one training channel.
            TrainingChannels = new[]
            {
                new Aws.Sagemaker.Inputs.AlgorithmTrainingSpecificationTrainingChannelArgs
                {
                    Name = "train",
                    SupportedContentTypes = new[]
                    {
                        "text/csv",
                    },
                    SupportedInputModes = new[]
                    {
                        "File",
                    },
                },
            },
        },
        Tags =
        {
            { "Environment", "test" },
        },
    });
});
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.aws.sagemaker.Algorithm;
import com.pulumi.aws.sagemaker.AlgorithmArgs;
import com.pulumi.aws.sagemaker.inputs.AlgorithmTrainingSpecificationArgs;
import com.pulumi.aws.sagemaker.inputs.AlgorithmTrainingSpecificationTrainingChannelArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    // Minimal SageMaker Algorithm: a hard-coded training image plus one
    // required input channel, tagged for the test environment.
    public static void stack(Context ctx) {
        var example = new Algorithm("example", AlgorithmArgs.builder()
            .algorithmName("example-algorithm")
            .trainingSpecification(AlgorithmTrainingSpecificationArgs.builder()
                .supportedTrainingInstanceTypes("ml.m5.large")
                .trainingImage("123456789012.dkr.ecr.us-west-2.amazonaws.com/example-training:latest")
                // Fixed: AlgorithmTrainingSpecificationTrainingChannelArgs was
                // referenced here but missing from the import list.
                .trainingChannels(AlgorithmTrainingSpecificationTrainingChannelArgs.builder()
                    .name("train")
                    .supportedContentTypes("text/csv")
                    .supportedInputModes("File")
                    .build())
                .build())
            .tags(Map.of("Environment", "test"))
            .build());
    }
}
# Minimal SageMaker Algorithm: a hard-coded training image plus one
# required input channel, tagged for the test environment.
resources:
  example:
    type: aws:sagemaker:Algorithm
    properties:
      algorithmName: example-algorithm
      trainingSpecification:
        supportedTrainingInstanceTypes:
          - ml.m5.large
        trainingImage: 123456789012.dkr.ecr.us-west-2.amazonaws.com/example-training:latest
        # Every algorithm must declare at least one training channel.
        trainingChannels:
          - name: train
            supportedContentTypes:
              - text/csv
            supportedInputModes:
              - File
      tags:
        Environment: test
Training Specification
import * as pulumi from "@pulumi/pulumi";
import * as aws from "@pulumi/aws";
const example = aws.sagemaker.getPrebuiltEcrImage({
repositoryName: "linear-learner",
imageTag: "1",
});
const exampleAlgorithm = new aws.sagemaker.Algorithm("example", {
algorithmName: "example-training-algorithm",
trainingSpecification: {
supportedTrainingInstanceTypes: [
"ml.m5.large",
"ml.c5.xlarge",
],
supportsDistributedTraining: true,
trainingImage: example.then(example => example.registryPath),
metricDefinitions: [{
name: "train:loss",
regex: "loss=(.*?);",
}],
supportedHyperParameters: [
{
defaultValue: "0.5",
description: "Continuous learning rate",
isRequired: true,
isTunable: true,
name: "eta",
type: "Continuous",
range: {
continuousParameterRangeSpecification: {
minValue: "0.1",
maxValue: "0.9",
},
},
},
{
defaultValue: "5",
description: "Maximum tree depth",
isRequired: false,
isTunable: true,
name: "max_depth",
type: "Integer",
range: {
integerParameterRangeSpecification: {
minValue: "1",
maxValue: "10",
},
},
},
{
defaultValue: "reg:squarederror",
description: "Objective function",
isRequired: false,
isTunable: false,
name: "objective",
type: "Categorical",
range: {
categoricalParameterRangeSpecification: {
values: [
"reg:squarederror",
"binary:logistic",
],
},
},
},
],
supportedTuningJobObjectiveMetrics: [{
metricName: "train:loss",
type: "Minimize",
}],
trainingChannels: [
{
description: "Training data channel",
isRequired: true,
name: "train",
supportedCompressionTypes: [
"None",
"Gzip",
],
supportedContentTypes: ["text/csv"],
supportedInputModes: ["File"],
},
{
name: "validation",
supportedContentTypes: ["application/json"],
supportedInputModes: ["Pipe"],
},
],
},
});
import pulumi
import pulumi_aws as aws
example = aws.sagemaker.get_prebuilt_ecr_image(repository_name="linear-learner",
image_tag="1")
example_algorithm = aws.sagemaker.Algorithm("example",
algorithm_name="example-training-algorithm",
training_specification={
"supported_training_instance_types": [
"ml.m5.large",
"ml.c5.xlarge",
],
"supports_distributed_training": True,
"training_image": example.registry_path,
"metric_definitions": [{
"name": "train:loss",
"regex": "loss=(.*?);",
}],
"supported_hyper_parameters": [
{
"default_value": "0.5",
"description": "Continuous learning rate",
"is_required": True,
"is_tunable": True,
"name": "eta",
"type": "Continuous",
"range": {
"continuous_parameter_range_specification": {
"min_value": "0.1",
"max_value": "0.9",
},
},
},
{
"default_value": "5",
"description": "Maximum tree depth",
"is_required": False,
"is_tunable": True,
"name": "max_depth",
"type": "Integer",
"range": {
"integer_parameter_range_specification": {
"min_value": "1",
"max_value": "10",
},
},
},
{
"default_value": "reg:squarederror",
"description": "Objective function",
"is_required": False,
"is_tunable": False,
"name": "objective",
"type": "Categorical",
"range": {
"categorical_parameter_range_specification": {
"values": [
"reg:squarederror",
"binary:logistic",
],
},
},
},
],
"supported_tuning_job_objective_metrics": [{
"metric_name": "train:loss",
"type": "Minimize",
}],
"training_channels": [
{
"description": "Training data channel",
"is_required": True,
"name": "train",
"supported_compression_types": [
"None",
"Gzip",
],
"supported_content_types": ["text/csv"],
"supported_input_modes": ["File"],
},
{
"name": "validation",
"supported_content_types": ["application/json"],
"supported_input_modes": ["Pipe"],
},
],
})
package main
import (
"github.com/pulumi/pulumi-aws/sdk/v7/go/aws/sagemaker"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
example, err := sagemaker.GetPrebuiltEcrImage(ctx, &sagemaker.GetPrebuiltEcrImageArgs{
RepositoryName: "linear-learner",
ImageTag: pulumi.StringRef("1"),
}, nil)
if err != nil {
return err
}
_, err = sagemaker.NewAlgorithm(ctx, "example", &sagemaker.AlgorithmArgs{
AlgorithmName: pulumi.String("example-training-algorithm"),
TrainingSpecification: &sagemaker.AlgorithmTrainingSpecificationArgs{
SupportedTrainingInstanceTypes: pulumi.StringArray{
pulumi.String("ml.m5.large"),
pulumi.String("ml.c5.xlarge"),
},
SupportsDistributedTraining: pulumi.Bool(true),
TrainingImage: pulumi.String(example.RegistryPath),
MetricDefinitions: sagemaker.AlgorithmTrainingSpecificationMetricDefinitionArray{
&sagemaker.AlgorithmTrainingSpecificationMetricDefinitionArgs{
Name: pulumi.String("train:loss"),
Regex: pulumi.String("loss=(.*?);"),
},
},
SupportedHyperParameters: sagemaker.AlgorithmTrainingSpecificationSupportedHyperParameterArray{
&sagemaker.AlgorithmTrainingSpecificationSupportedHyperParameterArgs{
DefaultValue: pulumi.String("0.5"),
Description: pulumi.String("Continuous learning rate"),
IsRequired: pulumi.Bool(true),
IsTunable: pulumi.Bool(true),
Name: pulumi.String("eta"),
Type: pulumi.String("Continuous"),
Range: &sagemaker.AlgorithmTrainingSpecificationSupportedHyperParameterRangeArgs{
ContinuousParameterRangeSpecification: &sagemaker.AlgorithmTrainingSpecificationSupportedHyperParameterRangeContinuousParameterRangeSpecificationArgs{
MinValue: pulumi.String("0.1"),
MaxValue: pulumi.String("0.9"),
},
},
},
&sagemaker.AlgorithmTrainingSpecificationSupportedHyperParameterArgs{
DefaultValue: pulumi.String("5"),
Description: pulumi.String("Maximum tree depth"),
IsRequired: pulumi.Bool(false),
IsTunable: pulumi.Bool(true),
Name: pulumi.String("max_depth"),
Type: pulumi.String("Integer"),
Range: &sagemaker.AlgorithmTrainingSpecificationSupportedHyperParameterRangeArgs{
IntegerParameterRangeSpecification: &sagemaker.AlgorithmTrainingSpecificationSupportedHyperParameterRangeIntegerParameterRangeSpecificationArgs{
MinValue: pulumi.String("1"),
MaxValue: pulumi.String("10"),
},
},
},
&sagemaker.AlgorithmTrainingSpecificationSupportedHyperParameterArgs{
DefaultValue: pulumi.String("reg:squarederror"),
Description: pulumi.String("Objective function"),
IsRequired: pulumi.Bool(false),
IsTunable: pulumi.Bool(false),
Name: pulumi.String("objective"),
Type: pulumi.String("Categorical"),
Range: &sagemaker.AlgorithmTrainingSpecificationSupportedHyperParameterRangeArgs{
CategoricalParameterRangeSpecification: &sagemaker.AlgorithmTrainingSpecificationSupportedHyperParameterRangeCategoricalParameterRangeSpecificationArgs{
Values: pulumi.StringArray{
pulumi.String("reg:squarederror"),
pulumi.String("binary:logistic"),
},
},
},
},
},
SupportedTuningJobObjectiveMetrics: sagemaker.AlgorithmTrainingSpecificationSupportedTuningJobObjectiveMetricArray{
&sagemaker.AlgorithmTrainingSpecificationSupportedTuningJobObjectiveMetricArgs{
MetricName: pulumi.String("train:loss"),
Type: pulumi.String("Minimize"),
},
},
TrainingChannels: sagemaker.AlgorithmTrainingSpecificationTrainingChannelArray{
&sagemaker.AlgorithmTrainingSpecificationTrainingChannelArgs{
Description: pulumi.String("Training data channel"),
IsRequired: pulumi.Bool(true),
Name: pulumi.String("train"),
SupportedCompressionTypes: pulumi.StringArray{
pulumi.String("None"),
pulumi.String("Gzip"),
},
SupportedContentTypes: pulumi.StringArray{
pulumi.String("text/csv"),
},
SupportedInputModes: pulumi.StringArray{
pulumi.String("File"),
},
},
&sagemaker.AlgorithmTrainingSpecificationTrainingChannelArgs{
Name: pulumi.String("validation"),
SupportedContentTypes: pulumi.StringArray{
pulumi.String("application/json"),
},
SupportedInputModes: pulumi.StringArray{
pulumi.String("Pipe"),
},
},
},
},
})
if err != nil {
return err
}
return nil
})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Aws = Pulumi.Aws;
return await Deployment.RunAsync(() =>
{
var example = Aws.Sagemaker.GetPrebuiltEcrImage.Invoke(new()
{
RepositoryName = "linear-learner",
ImageTag = "1",
});
var exampleAlgorithm = new Aws.Sagemaker.Algorithm("example", new()
{
AlgorithmName = "example-training-algorithm",
TrainingSpecification = new Aws.Sagemaker.Inputs.AlgorithmTrainingSpecificationArgs
{
SupportedTrainingInstanceTypes = new[]
{
"ml.m5.large",
"ml.c5.xlarge",
},
SupportsDistributedTraining = true,
TrainingImage = example.Apply(getPrebuiltEcrImageResult => getPrebuiltEcrImageResult.RegistryPath),
MetricDefinitions = new[]
{
new Aws.Sagemaker.Inputs.AlgorithmTrainingSpecificationMetricDefinitionArgs
{
Name = "train:loss",
Regex = "loss=(.*?);",
},
},
SupportedHyperParameters = new[]
{
new Aws.Sagemaker.Inputs.AlgorithmTrainingSpecificationSupportedHyperParameterArgs
{
DefaultValue = "0.5",
Description = "Continuous learning rate",
IsRequired = true,
IsTunable = true,
Name = "eta",
Type = "Continuous",
Range = new Aws.Sagemaker.Inputs.AlgorithmTrainingSpecificationSupportedHyperParameterRangeArgs
{
ContinuousParameterRangeSpecification = new Aws.Sagemaker.Inputs.AlgorithmTrainingSpecificationSupportedHyperParameterRangeContinuousParameterRangeSpecificationArgs
{
MinValue = "0.1",
MaxValue = "0.9",
},
},
},
new Aws.Sagemaker.Inputs.AlgorithmTrainingSpecificationSupportedHyperParameterArgs
{
DefaultValue = "5",
Description = "Maximum tree depth",
IsRequired = false,
IsTunable = true,
Name = "max_depth",
Type = "Integer",
Range = new Aws.Sagemaker.Inputs.AlgorithmTrainingSpecificationSupportedHyperParameterRangeArgs
{
IntegerParameterRangeSpecification = new Aws.Sagemaker.Inputs.AlgorithmTrainingSpecificationSupportedHyperParameterRangeIntegerParameterRangeSpecificationArgs
{
MinValue = "1",
MaxValue = "10",
},
},
},
new Aws.Sagemaker.Inputs.AlgorithmTrainingSpecificationSupportedHyperParameterArgs
{
DefaultValue = "reg:squarederror",
Description = "Objective function",
IsRequired = false,
IsTunable = false,
Name = "objective",
Type = "Categorical",
Range = new Aws.Sagemaker.Inputs.AlgorithmTrainingSpecificationSupportedHyperParameterRangeArgs
{
CategoricalParameterRangeSpecification = new Aws.Sagemaker.Inputs.AlgorithmTrainingSpecificationSupportedHyperParameterRangeCategoricalParameterRangeSpecificationArgs
{
Values = new[]
{
"reg:squarederror",
"binary:logistic",
},
},
},
},
},
SupportedTuningJobObjectiveMetrics = new[]
{
new Aws.Sagemaker.Inputs.AlgorithmTrainingSpecificationSupportedTuningJobObjectiveMetricArgs
{
MetricName = "train:loss",
Type = "Minimize",
},
},
TrainingChannels = new[]
{
new Aws.Sagemaker.Inputs.AlgorithmTrainingSpecificationTrainingChannelArgs
{
Description = "Training data channel",
IsRequired = true,
Name = "train",
SupportedCompressionTypes = new[]
{
"None",
"Gzip",
},
SupportedContentTypes = new[]
{
"text/csv",
},
SupportedInputModes = new[]
{
"File",
},
},
new Aws.Sagemaker.Inputs.AlgorithmTrainingSpecificationTrainingChannelArgs
{
Name = "validation",
SupportedContentTypes = new[]
{
"application/json",
},
SupportedInputModes = new[]
{
"Pipe",
},
},
},
},
});
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.aws.sagemaker.SagemakerFunctions;
import com.pulumi.aws.sagemaker.inputs.GetPrebuiltEcrImageArgs;
import com.pulumi.aws.sagemaker.Algorithm;
import com.pulumi.aws.sagemaker.AlgorithmArgs;
import com.pulumi.aws.sagemaker.inputs.AlgorithmTrainingSpecificationArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
final var example = SagemakerFunctions.getPrebuiltEcrImage(GetPrebuiltEcrImageArgs.builder()
.repositoryName("linear-learner")
.imageTag("1")
.build());
var exampleAlgorithm = new Algorithm("exampleAlgorithm", AlgorithmArgs.builder()
.algorithmName("example-training-algorithm")
.trainingSpecification(AlgorithmTrainingSpecificationArgs.builder()
.supportedTrainingInstanceTypes(
"ml.m5.large",
"ml.c5.xlarge")
.supportsDistributedTraining(true)
.trainingImage(example.registryPath())
.metricDefinitions(AlgorithmTrainingSpecificationMetricDefinitionArgs.builder()
.name("train:loss")
.regex("loss=(.*?);")
.build())
.supportedHyperParameters(
AlgorithmTrainingSpecificationSupportedHyperParameterArgs.builder()
.defaultValue("0.5")
.description("Continuous learning rate")
.isRequired(true)
.isTunable(true)
.name("eta")
.type("Continuous")
.range(AlgorithmTrainingSpecificationSupportedHyperParameterRangeArgs.builder()
.continuousParameterRangeSpecification(AlgorithmTrainingSpecificationSupportedHyperParameterRangeContinuousParameterRangeSpecificationArgs.builder()
.minValue("0.1")
.maxValue("0.9")
.build())
.build())
.build(),
AlgorithmTrainingSpecificationSupportedHyperParameterArgs.builder()
.defaultValue("5")
.description("Maximum tree depth")
.isRequired(false)
.isTunable(true)
.name("max_depth")
.type("Integer")
.range(AlgorithmTrainingSpecificationSupportedHyperParameterRangeArgs.builder()
.integerParameterRangeSpecification(AlgorithmTrainingSpecificationSupportedHyperParameterRangeIntegerParameterRangeSpecificationArgs.builder()
.minValue("1")
.maxValue("10")
.build())
.build())
.build(),
AlgorithmTrainingSpecificationSupportedHyperParameterArgs.builder()
.defaultValue("reg:squarederror")
.description("Objective function")
.isRequired(false)
.isTunable(false)
.name("objective")
.type("Categorical")
.range(AlgorithmTrainingSpecificationSupportedHyperParameterRangeArgs.builder()
.categoricalParameterRangeSpecification(AlgorithmTrainingSpecificationSupportedHyperParameterRangeCategoricalParameterRangeSpecificationArgs.builder()
.values(
"reg:squarederror",
"binary:logistic")
.build())
.build())
.build())
.supportedTuningJobObjectiveMetrics(AlgorithmTrainingSpecificationSupportedTuningJobObjectiveMetricArgs.builder()
.metricName("train:loss")
.type("Minimize")
.build())
.trainingChannels(
AlgorithmTrainingSpecificationTrainingChannelArgs.builder()
.description("Training data channel")
.isRequired(true)
.name("train")
.supportedCompressionTypes(
"None",
"Gzip")
.supportedContentTypes("text/csv")
.supportedInputModes("File")
.build(),
AlgorithmTrainingSpecificationTrainingChannelArgs.builder()
.name("validation")
.supportedContentTypes("application/json")
.supportedInputModes("Pipe")
.build())
.build())
.build());
}
}
resources:
exampleAlgorithm:
type: aws:sagemaker:Algorithm
name: example
properties:
algorithmName: example-training-algorithm
trainingSpecification:
supportedTrainingInstanceTypes:
- ml.m5.large
- ml.c5.xlarge
supportsDistributedTraining: true
trainingImage: ${example.registryPath}
metricDefinitions:
- name: train:loss
regex: loss=(.*?);
supportedHyperParameters:
- defaultValue: '0.5'
description: Continuous learning rate
isRequired: true
isTunable: true
name: eta
type: Continuous
range:
continuousParameterRangeSpecification:
minValue: '0.1'
maxValue: '0.9'
- defaultValue: '5'
description: Maximum tree depth
isRequired: false
isTunable: true
name: max_depth
type: Integer
range:
integerParameterRangeSpecification:
minValue: '1'
maxValue: '10'
- defaultValue: reg:squarederror
description: Objective function
isRequired: false
isTunable: false
name: objective
type: Categorical
range:
categoricalParameterRangeSpecification:
values:
- reg:squarederror
- binary:logistic
supportedTuningJobObjectiveMetrics:
- metricName: train:loss
type: Minimize
trainingChannels:
- description: Training data channel
isRequired: true
name: train
supportedCompressionTypes:
- None
- Gzip
supportedContentTypes:
- text/csv
supportedInputModes:
- File
- name: validation
supportedContentTypes:
- application/json
supportedInputModes:
- Pipe
variables:
example:
fn::invoke:
function: aws:sagemaker:getPrebuiltEcrImage
arguments:
repositoryName: linear-learner
imageTag: '1'
Inference Specification
import * as pulumi from "@pulumi/pulumi";
import * as aws from "@pulumi/aws";
const example = aws.sagemaker.getPrebuiltEcrImage({
repositoryName: "linear-learner",
imageTag: "1",
});
const exampleAlgorithm = new aws.sagemaker.Algorithm("example", {
algorithmName: "example-inference-algorithm",
trainingSpecification: {
supportedTrainingInstanceTypes: ["ml.m5.large"],
trainingImage: example.then(example => example.registryPath),
trainingChannels: [{
name: "train",
supportedContentTypes: ["text/csv"],
supportedInputModes: ["File"],
}],
},
inferenceSpecification: {
supportedContentTypes: ["text/csv"],
supportedRealtimeInferenceInstanceTypes: ["ml.m5.large"],
supportedResponseMimeTypes: ["text/csv"],
supportedTransformInstanceTypes: ["ml.m5.large"],
containers: [{
containerHostname: "test-host",
environment: {
TEST: "value",
},
framework: "XGBOOST",
frameworkVersion: "1.5-1",
image: example.then(example => example.registryPath),
isCheckpoint: true,
nearestModelName: "nearest-model",
baseModel: {
hubContentName: "basemodel",
hubContentVersion: "1.0.0",
recipeName: "recipe",
},
modelInput: {
dataInputConfig: "{}",
},
}],
},
});
import pulumi
import pulumi_aws as aws
example = aws.sagemaker.get_prebuilt_ecr_image(repository_name="linear-learner",
image_tag="1")
example_algorithm = aws.sagemaker.Algorithm("example",
algorithm_name="example-inference-algorithm",
training_specification={
"supported_training_instance_types": ["ml.m5.large"],
"training_image": example.registry_path,
"training_channels": [{
"name": "train",
"supported_content_types": ["text/csv"],
"supported_input_modes": ["File"],
}],
},
inference_specification={
"supported_content_types": ["text/csv"],
"supported_realtime_inference_instance_types": ["ml.m5.large"],
"supported_response_mime_types": ["text/csv"],
"supported_transform_instance_types": ["ml.m5.large"],
"containers": [{
"container_hostname": "test-host",
"environment": {
"TEST": "value",
},
"framework": "XGBOOST",
"framework_version": "1.5-1",
"image": example.registry_path,
"is_checkpoint": True,
"nearest_model_name": "nearest-model",
"base_model": {
"hub_content_name": "basemodel",
"hub_content_version": "1.0.0",
"recipe_name": "recipe",
},
"model_input": {
"data_input_config": "{}",
},
}],
})
package main
import (
"github.com/pulumi/pulumi-aws/sdk/v7/go/aws/sagemaker"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
example, err := sagemaker.GetPrebuiltEcrImage(ctx, &sagemaker.GetPrebuiltEcrImageArgs{
RepositoryName: "linear-learner",
ImageTag: pulumi.StringRef("1"),
}, nil)
if err != nil {
return err
}
_, err = sagemaker.NewAlgorithm(ctx, "example", &sagemaker.AlgorithmArgs{
AlgorithmName: pulumi.String("example-inference-algorithm"),
TrainingSpecification: &sagemaker.AlgorithmTrainingSpecificationArgs{
SupportedTrainingInstanceTypes: pulumi.StringArray{
pulumi.String("ml.m5.large"),
},
TrainingImage: pulumi.String(example.RegistryPath),
TrainingChannels: sagemaker.AlgorithmTrainingSpecificationTrainingChannelArray{
&sagemaker.AlgorithmTrainingSpecificationTrainingChannelArgs{
Name: pulumi.String("train"),
SupportedContentTypes: pulumi.StringArray{
pulumi.String("text/csv"),
},
SupportedInputModes: pulumi.StringArray{
pulumi.String("File"),
},
},
},
},
InferenceSpecification: &sagemaker.AlgorithmInferenceSpecificationArgs{
SupportedContentTypes: pulumi.StringArray{
pulumi.String("text/csv"),
},
SupportedRealtimeInferenceInstanceTypes: pulumi.StringArray{
pulumi.String("ml.m5.large"),
},
SupportedResponseMimeTypes: pulumi.StringArray{
pulumi.String("text/csv"),
},
SupportedTransformInstanceTypes: pulumi.StringArray{
pulumi.String("ml.m5.large"),
},
Containers: sagemaker.AlgorithmInferenceSpecificationContainerArray{
&sagemaker.AlgorithmInferenceSpecificationContainerArgs{
ContainerHostname: pulumi.String("test-host"),
Environment: pulumi.StringMap{
"TEST": pulumi.String("value"),
},
Framework: pulumi.String("XGBOOST"),
FrameworkVersion: pulumi.String("1.5-1"),
Image: pulumi.String(example.RegistryPath),
IsCheckpoint: pulumi.Bool(true),
NearestModelName: pulumi.String("nearest-model"),
BaseModel: &sagemaker.AlgorithmInferenceSpecificationContainerBaseModelArgs{
HubContentName: pulumi.String("basemodel"),
HubContentVersion: pulumi.String("1.0.0"),
RecipeName: pulumi.String("recipe"),
},
ModelInput: &sagemaker.AlgorithmInferenceSpecificationContainerModelInputArgs{
DataInputConfig: pulumi.String("{}"),
},
},
},
},
})
if err != nil {
return err
}
return nil
})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Aws = Pulumi.Aws;
return await Deployment.RunAsync(() =>
{
var example = Aws.Sagemaker.GetPrebuiltEcrImage.Invoke(new()
{
RepositoryName = "linear-learner",
ImageTag = "1",
});
var exampleAlgorithm = new Aws.Sagemaker.Algorithm("example", new()
{
AlgorithmName = "example-inference-algorithm",
TrainingSpecification = new Aws.Sagemaker.Inputs.AlgorithmTrainingSpecificationArgs
{
SupportedTrainingInstanceTypes = new[]
{
"ml.m5.large",
},
TrainingImage = example.Apply(getPrebuiltEcrImageResult => getPrebuiltEcrImageResult.RegistryPath),
TrainingChannels = new[]
{
new Aws.Sagemaker.Inputs.AlgorithmTrainingSpecificationTrainingChannelArgs
{
Name = "train",
SupportedContentTypes = new[]
{
"text/csv",
},
SupportedInputModes = new[]
{
"File",
},
},
},
},
InferenceSpecification = new Aws.Sagemaker.Inputs.AlgorithmInferenceSpecificationArgs
{
SupportedContentTypes = new[]
{
"text/csv",
},
SupportedRealtimeInferenceInstanceTypes = new[]
{
"ml.m5.large",
},
SupportedResponseMimeTypes = new[]
{
"text/csv",
},
SupportedTransformInstanceTypes = new[]
{
"ml.m5.large",
},
Containers = new[]
{
new Aws.Sagemaker.Inputs.AlgorithmInferenceSpecificationContainerArgs
{
ContainerHostname = "test-host",
Environment =
{
{ "TEST", "value" },
},
Framework = "XGBOOST",
FrameworkVersion = "1.5-1",
Image = example.Apply(getPrebuiltEcrImageResult => getPrebuiltEcrImageResult.RegistryPath),
IsCheckpoint = true,
NearestModelName = "nearest-model",
BaseModel = new Aws.Sagemaker.Inputs.AlgorithmInferenceSpecificationContainerBaseModelArgs
{
HubContentName = "basemodel",
HubContentVersion = "1.0.0",
RecipeName = "recipe",
},
ModelInput = new Aws.Sagemaker.Inputs.AlgorithmInferenceSpecificationContainerModelInputArgs
{
DataInputConfig = "{}",
},
},
},
},
});
});
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.aws.sagemaker.SagemakerFunctions;
import com.pulumi.aws.sagemaker.inputs.GetPrebuiltEcrImageArgs;
import com.pulumi.aws.sagemaker.Algorithm;
import com.pulumi.aws.sagemaker.AlgorithmArgs;
import com.pulumi.aws.sagemaker.inputs.AlgorithmTrainingSpecificationArgs;
import com.pulumi.aws.sagemaker.inputs.AlgorithmTrainingSpecificationTrainingChannelArgs;
import com.pulumi.aws.sagemaker.inputs.AlgorithmInferenceSpecificationArgs;
import com.pulumi.aws.sagemaker.inputs.AlgorithmInferenceSpecificationContainerArgs;
import com.pulumi.aws.sagemaker.inputs.AlgorithmInferenceSpecificationContainerBaseModelArgs;
import com.pulumi.aws.sagemaker.inputs.AlgorithmInferenceSpecificationContainerModelInputArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    // Algorithm with both a training specification and an inference
    // specification (container image, environment, base model, model input).
    // Fixed: the four nested Algorithm*Args input classes used below were
    // missing from the original import list, so the example did not compile.
    public static void stack(Context ctx) {
        // Resolve the AWS-managed linear-learner image for this region.
        final var example = SagemakerFunctions.getPrebuiltEcrImage(GetPrebuiltEcrImageArgs.builder()
            .repositoryName("linear-learner")
            .imageTag("1")
            .build());

        var exampleAlgorithm = new Algorithm("exampleAlgorithm", AlgorithmArgs.builder()
            .algorithmName("example-inference-algorithm")
            .trainingSpecification(AlgorithmTrainingSpecificationArgs.builder()
                .supportedTrainingInstanceTypes("ml.m5.large")
                .trainingImage(example.registryPath())
                .trainingChannels(AlgorithmTrainingSpecificationTrainingChannelArgs.builder()
                    .name("train")
                    .supportedContentTypes("text/csv")
                    .supportedInputModes("File")
                    .build())
                .build())
            .inferenceSpecification(AlgorithmInferenceSpecificationArgs.builder()
                .supportedContentTypes("text/csv")
                .supportedRealtimeInferenceInstanceTypes("ml.m5.large")
                .supportedResponseMimeTypes("text/csv")
                .supportedTransformInstanceTypes("ml.m5.large")
                .containers(AlgorithmInferenceSpecificationContainerArgs.builder()
                    .containerHostname("test-host")
                    .environment(Map.of("TEST", "value"))
                    .framework("XGBOOST")
                    .frameworkVersion("1.5-1")
                    .image(example.registryPath())
                    .isCheckpoint(true)
                    .nearestModelName("nearest-model")
                    .baseModel(AlgorithmInferenceSpecificationContainerBaseModelArgs.builder()
                        .hubContentName("basemodel")
                        .hubContentVersion("1.0.0")
                        .recipeName("recipe")
                        .build())
                    .modelInput(AlgorithmInferenceSpecificationContainerModelInputArgs.builder()
                        .dataInputConfig("{}")
                        .build())
                    .build())
                .build())
            .build());
    }
}
resources:
exampleAlgorithm:
type: aws:sagemaker:Algorithm
name: example
properties:
algorithmName: example-inference-algorithm
trainingSpecification:
supportedTrainingInstanceTypes:
- ml.m5.large
trainingImage: ${example.registryPath}
trainingChannels:
- name: train
supportedContentTypes:
- text/csv
supportedInputModes:
- File
inferenceSpecification:
supportedContentTypes:
- text/csv
supportedRealtimeInferenceInstanceTypes:
- ml.m5.large
supportedResponseMimeTypes:
- text/csv
supportedTransformInstanceTypes:
- ml.m5.large
containers:
- containerHostname: test-host
environment:
TEST: value
framework: XGBOOST
frameworkVersion: 1.5-1
image: ${example.registryPath}
isCheckpoint: true
nearestModelName: nearest-model
baseModel:
hubContentName: basemodel
hubContentVersion: 1.0.0
recipeName: recipe
modelInput:
dataInputConfig: '{}'
variables:
example:
fn::invoke:
function: aws:sagemaker:getPrebuiltEcrImage
arguments:
repositoryName: linear-learner
imageTag: '1'
Validation Specification
import * as pulumi from "@pulumi/pulumi";
import * as aws from "@pulumi/aws";
const current = aws.getPartition({});
const example = aws.sagemaker.getPrebuiltEcrImage({
repositoryName: "linear-learner",
imageTag: "1",
});
const assumeRole = current.then(current => aws.iam.getPolicyDocument({
statements: [{
actions: ["sts:AssumeRole"],
principals: [{
type: "Service",
identifiers: [`sagemaker.${current.dnsSuffix}`],
}],
}],
}));
const exampleRole = new aws.iam.Role("example", {
name: "example-sagemaker-algorithm-role",
assumeRolePolicy: assumeRole.then(assumeRole => assumeRole.json),
});
const exampleRolePolicyAttachment = new aws.iam.RolePolicyAttachment("example", {
role: exampleRole.name,
policyArn: current.then(current => `arn:${current.partition}:iam::aws:policy/AmazonSageMakerFullAccess`),
});
const exampleBucket = new aws.s3.Bucket("example", {
bucket: "example-sagemaker-algorithm-validation-bucket",
forceDestroy: true,
});
const s3Access = aws.iam.getPolicyDocumentOutput({
statements: [{
effect: "Allow",
actions: [
"s3:GetBucketLocation",
"s3:ListBucket",
"s3:GetObject",
"s3:PutObject",
],
resources: [
exampleBucket.arn,
pulumi.interpolate`${exampleBucket.arn}/*`,
],
}],
});
const exampleRolePolicy = new aws.iam.RolePolicy("example", {
role: exampleRole.name,
policy: s3Access.apply(s3Access => s3Access.json),
});
const training = new aws.s3.BucketObjectv2("training", {
bucket: exampleBucket.bucket,
key: "algorithm/training/data.csv",
content: `1,1.0,0.0
0,0.0,1.0
1,1.0,1.0
0,0.0,0.0
`,
});
const transform = new aws.s3.BucketObjectv2("transform", {
bucket: exampleBucket.bucket,
key: "algorithm/transform/input.csv",
content: `1.0,0.0
0.0,1.0
`,
});
const exampleAlgorithm = new aws.sagemaker.Algorithm("example", {
algorithmName: "example-validation-algorithm",
trainingSpecification: {
trainingImage: example.then(example => example.registryPath),
supportedTrainingInstanceTypes: ["ml.m5.large"],
supportedHyperParameters: [
{
defaultValue: "2",
description: "Feature dimension",
isRequired: true,
isTunable: false,
name: "feature_dim",
type: "Integer",
range: {
integerParameterRangeSpecification: {
minValue: "2",
maxValue: "2",
},
},
},
{
defaultValue: "4",
description: "Mini batch size",
isRequired: true,
isTunable: false,
name: "mini_batch_size",
type: "Integer",
range: {
integerParameterRangeSpecification: {
minValue: "4",
maxValue: "4",
},
},
},
{
defaultValue: "binary_classifier",
description: "Predictor type",
isRequired: true,
isTunable: false,
name: "predictor_type",
type: "Categorical",
range: {
categoricalParameterRangeSpecification: {
values: ["binary_classifier"],
},
},
},
],
trainingChannels: [{
name: "train",
supportedContentTypes: ["text/csv"],
supportedInputModes: ["File"],
}],
},
inferenceSpecification: {
supportedContentTypes: ["text/csv"],
supportedResponseMimeTypes: ["text/csv"],
supportedTransformInstanceTypes: ["ml.m5.large"],
containers: [{
image: example.then(example => example.registryPath),
}],
},
validationSpecification: {
validationRole: exampleRole.arn,
validationProfiles: {
profileName: "validation-profile",
trainingJobDefinition: {
hyperParameters: {
feature_dim: "2",
mini_batch_size: "4",
predictor_type: "binary_classifier",
},
trainingInputMode: "File",
inputDataConfigs: [{
channelName: "train",
compressionType: "None",
contentType: "text/csv",
inputMode: "File",
recordWrapperType: "None",
shuffleConfig: {
seed: 1,
},
dataSource: {
s3DataSource: {
attributeNames: ["label"],
s3DataDistributionType: "ShardedByS3Key",
s3DataType: "S3Prefix",
s3Uri: pulumi.interpolate`s3://${exampleBucket.bucket}/algorithm/training/`,
},
},
}],
outputDataConfig: {
compressionType: "GZIP",
s3OutputPath: pulumi.interpolate`s3://${exampleBucket.bucket}/algorithm/output`,
},
resourceConfig: {
instanceCount: 1,
instanceType: "ml.m5.large",
keepAlivePeriodInSeconds: 60,
volumeSizeInGb: 30,
},
stoppingCondition: {
maxPendingTimeInSeconds: 7200,
maxRuntimeInSeconds: 1800,
maxWaitTimeInSeconds: 3600,
},
},
transformJobDefinition: {
batchStrategy: "MultiRecord",
environment: {
Te: "enabled",
},
maxConcurrentTransforms: 1,
maxPayloadInMb: 6,
transformInput: {
compressionType: "None",
contentType: "text/csv",
splitType: "Line",
dataSource: {
s3DataSource: {
s3DataType: "S3Prefix",
s3Uri: pulumi.interpolate`s3://${exampleBucket.bucket}/algorithm/transform/`,
},
},
},
transformOutput: {
accept: "text/csv",
assembleWith: "Line",
s3OutputPath: pulumi.interpolate`s3://${exampleBucket.bucket}/algorithm/transform-output`,
},
transformResources: {
instanceCount: 1,
instanceType: "ml.m5.large",
},
},
},
},
}, {
dependsOn: [
exampleRolePolicyAttachment,
exampleRolePolicy,
training,
transform,
],
});
# Validation Specification example: registers a SageMaker Algorithm whose
# training and batch-transform behavior SageMaker validates using the IAM
# role, S3 bucket, and sample CSV fixtures created below.
import pulumi
import pulumi_aws as aws

# Partition metadata (aws, aws-cn, aws-us-gov) keeps ARNs and service
# principals correct in any partition.
current = aws.get_partition()
# Prebuilt first-party "linear-learner" image for the current region.
example = aws.sagemaker.get_prebuilt_ecr_image(repository_name="linear-learner",
    image_tag="1")
# Trust policy letting the SageMaker service assume the validation role.
assume_role = aws.iam.get_policy_document(statements=[{
    "actions": ["sts:AssumeRole"],
    "principals": [{
        "type": "Service",
        "identifiers": [f"sagemaker.{current.dns_suffix}"],
    }],
}])
example_role = aws.iam.Role("example",
    name="example-sagemaker-algorithm-role",
    assume_role_policy=assume_role.json)
example_role_policy_attachment = aws.iam.RolePolicyAttachment("example",
    role=example_role.name,
    policy_arn=f"arn:{current.partition}:iam::aws:policy/AmazonSageMakerFullAccess")
# Bucket holding the training/transform inputs and the validation job output.
example_bucket = aws.s3.Bucket("example",
    bucket="example-sagemaker-algorithm-validation-bucket",
    force_destroy=True)
# Least-privilege S3 access for the validation jobs (bucket + objects).
s3_access = aws.iam.get_policy_document_output(statements=[{
    "effect": "Allow",
    "actions": [
        "s3:GetBucketLocation",
        "s3:ListBucket",
        "s3:GetObject",
        "s3:PutObject",
    ],
    "resources": [
        example_bucket.arn,
        example_bucket.arn.apply(lambda arn: f"{arn}/*"),
    ],
}])
example_role_policy = aws.iam.RolePolicy("example",
    role=example_role.name,
    policy=s3_access.json)
# Tiny CSV fixtures: label + two features per row for training, two features
# per row for the batch transform input.
training = aws.s3.BucketObjectv2("training",
    bucket=example_bucket.bucket,
    key="algorithm/training/data.csv",
    content="""1,1.0,0.0
0,0.0,1.0
1,1.0,1.0
0,0.0,0.0
""")
transform = aws.s3.BucketObjectv2("transform",
    bucket=example_bucket.bucket,
    key="algorithm/transform/input.csv",
    content="""1.0,0.0
0.0,1.0
""")
example_algorithm = aws.sagemaker.Algorithm("example",
    algorithm_name="example-validation-algorithm",
    training_specification={
        "training_image": example.registry_path,
        "supported_training_instance_types": ["ml.m5.large"],
        # Each hyperparameter is pinned to a single valid value so the
        # validation training job is deterministic.
        "supported_hyper_parameters": [
            {
                "default_value": "2",
                "description": "Feature dimension",
                "is_required": True,
                "is_tunable": False,
                "name": "feature_dim",
                "type": "Integer",
                "range": {
                    "integer_parameter_range_specification": {
                        "min_value": "2",
                        "max_value": "2",
                    },
                },
            },
            {
                "default_value": "4",
                "description": "Mini batch size",
                "is_required": True,
                "is_tunable": False,
                "name": "mini_batch_size",
                "type": "Integer",
                "range": {
                    "integer_parameter_range_specification": {
                        "min_value": "4",
                        "max_value": "4",
                    },
                },
            },
            {
                "default_value": "binary_classifier",
                "description": "Predictor type",
                "is_required": True,
                "is_tunable": False,
                "name": "predictor_type",
                "type": "Categorical",
                "range": {
                    "categorical_parameter_range_specification": {
                        "values": ["binary_classifier"],
                    },
                },
            },
        ],
        "training_channels": [{
            "name": "train",
            "supported_content_types": ["text/csv"],
            "supported_input_modes": ["File"],
        }],
    },
    inference_specification={
        "supported_content_types": ["text/csv"],
        "supported_response_mime_types": ["text/csv"],
        "supported_transform_instance_types": ["ml.m5.large"],
        "containers": [{
            "image": example.registry_path,
        }],
    },
    # SageMaker runs this profile (one training job, then one transform job)
    # before the algorithm is marked usable.
    validation_specification={
        "validation_role": example_role.arn,
        "validation_profiles": {
            "profile_name": "validation-profile",
            "training_job_definition": {
                "hyper_parameters": {
                    "feature_dim": "2",
                    "mini_batch_size": "4",
                    "predictor_type": "binary_classifier",
                },
                "training_input_mode": "File",
                "input_data_configs": [{
                    "channel_name": "train",
                    "compression_type": "None",
                    "content_type": "text/csv",
                    "input_mode": "File",
                    "record_wrapper_type": "None",
                    "shuffle_config": {
                        "seed": 1,
                    },
                    "data_source": {
                        "s3_data_source": {
                            "attribute_names": ["label"],
                            "s3_data_distribution_type": "ShardedByS3Key",
                            "s3_data_type": "S3Prefix",
                            "s3_uri": example_bucket.bucket.apply(lambda bucket: f"s3://{bucket}/algorithm/training/"),
                        },
                    },
                }],
                "output_data_config": {
                    "compression_type": "GZIP",
                    "s3_output_path": example_bucket.bucket.apply(lambda bucket: f"s3://{bucket}/algorithm/output"),
                },
                "resource_config": {
                    "instance_count": 1,
                    "instance_type": "ml.m5.large",
                    "keep_alive_period_in_seconds": 60,
                    "volume_size_in_gb": 30,
                },
                "stopping_condition": {
                    "max_pending_time_in_seconds": 7200,
                    "max_runtime_in_seconds": 1800,
                    "max_wait_time_in_seconds": 3600,
                },
            },
            "transform_job_definition": {
                "batch_strategy": "MultiRecord",
                "environment": {
                    "Te": "enabled",
                },
                "max_concurrent_transforms": 1,
                "max_payload_in_mb": 6,
                "transform_input": {
                    "compression_type": "None",
                    "content_type": "text/csv",
                    "split_type": "Line",
                    "data_source": {
                        "s3_data_source": {
                            "s3_data_type": "S3Prefix",
                            "s3_uri": example_bucket.bucket.apply(lambda bucket: f"s3://{bucket}/algorithm/transform/"),
                        },
                    },
                },
                "transform_output": {
                    "accept": "text/csv",
                    "assemble_with": "Line",
                    "s3_output_path": example_bucket.bucket.apply(lambda bucket: f"s3://{bucket}/algorithm/transform-output"),
                },
                "transform_resources": {
                    "instance_count": 1,
                    "instance_type": "ml.m5.large",
                },
            },
        },
    },
    # Validation runs immediately on create, so the role permissions and the
    # CSV fixtures must already exist.
    opts = pulumi.ResourceOptions(depends_on=[
        example_role_policy_attachment,
        example_role_policy,
        training,
        transform,
    ]))
package main
import (
"fmt"
"github.com/pulumi/pulumi-aws/sdk/v7/go/aws"
"github.com/pulumi/pulumi-aws/sdk/v7/go/aws/iam"
"github.com/pulumi/pulumi-aws/sdk/v7/go/aws/s3"
"github.com/pulumi/pulumi-aws/sdk/v7/go/aws/sagemaker"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		// Partition metadata (aws, aws-cn, aws-us-gov) keeps ARNs and
		// service principals correct in any partition.
		current, err := aws.GetPartition(ctx, &aws.GetPartitionArgs{}, nil)
		if err != nil {
			return err
		}
		// Prebuilt first-party "linear-learner" image for the current region.
		example, err := sagemaker.GetPrebuiltEcrImage(ctx, &sagemaker.GetPrebuiltEcrImageArgs{
			RepositoryName: "linear-learner",
			ImageTag:       pulumi.StringRef("1"),
		}, nil)
		if err != nil {
			return err
		}
		// Trust policy letting the SageMaker service assume the validation role.
		assumeRole, err := iam.GetPolicyDocument(ctx, &iam.GetPolicyDocumentArgs{
			Statements: []iam.GetPolicyDocumentStatement{
				{
					Actions: []string{
						"sts:AssumeRole",
					},
					Principals: []iam.GetPolicyDocumentStatementPrincipal{
						{
							Type: "Service",
							Identifiers: []string{
								fmt.Sprintf("sagemaker.%v", current.DnsSuffix),
							},
						},
					},
				},
			},
		}, nil)
		if err != nil {
			return err
		}
		exampleRole, err := iam.NewRole(ctx, "example", &iam.RoleArgs{
			Name: pulumi.String("example-sagemaker-algorithm-role"),
			// Single conversion suffices (was a redundant nested pulumi.String).
			AssumeRolePolicy: pulumi.String(assumeRole.Json),
		})
		if err != nil {
			return err
		}
		exampleRolePolicyAttachment, err := iam.NewRolePolicyAttachment(ctx, "example", &iam.RolePolicyAttachmentArgs{
			Role:      exampleRole.Name,
			PolicyArn: pulumi.Sprintf("arn:%v:iam::aws:policy/AmazonSageMakerFullAccess", current.Partition),
		})
		if err != nil {
			return err
		}
		// Bucket holding the training/transform inputs and the validation job output.
		exampleBucket, err := s3.NewBucket(ctx, "example", &s3.BucketArgs{
			Bucket:       pulumi.String("example-sagemaker-algorithm-validation-bucket"),
			ForceDestroy: pulumi.Bool(true),
		})
		if err != nil {
			return err
		}
		// Least-privilege S3 access for the validation jobs (bucket + objects).
		s3Access := iam.GetPolicyDocumentOutput(ctx, iam.GetPolicyDocumentOutputArgs{
			Statements: iam.GetPolicyDocumentStatementArray{
				&iam.GetPolicyDocumentStatementArgs{
					Effect: pulumi.String("Allow"),
					Actions: pulumi.StringArray{
						pulumi.String("s3:GetBucketLocation"),
						pulumi.String("s3:ListBucket"),
						pulumi.String("s3:GetObject"),
						pulumi.String("s3:PutObject"),
					},
					Resources: pulumi.StringArray{
						exampleBucket.Arn,
						exampleBucket.Arn.ApplyT(func(arn string) (string, error) {
							return fmt.Sprintf("%v/*", arn), nil
						}).(pulumi.StringOutput),
					},
				},
			},
		}, nil)
		exampleRolePolicy, err := iam.NewRolePolicy(ctx, "example", &iam.RolePolicyArgs{
			Role: exampleRole.Name,
			// Json() yields a StringOutput directly; the previous
			// pulumi.String(...) conversion of a StringPtrOutput did not compile.
			Policy: s3Access.Json(),
		})
		if err != nil {
			return err
		}
		// Tiny CSV fixtures: label + two features per row for training, two
		// features per row for the batch transform input.
		training, err := s3.NewBucketObjectv2(ctx, "training", &s3.BucketObjectv2Args{
			Bucket:  exampleBucket.Bucket,
			Key:     pulumi.String("algorithm/training/data.csv"),
			Content: pulumi.String("1,1.0,0.0\n0,0.0,1.0\n1,1.0,1.0\n0,0.0,0.0\n"),
		})
		if err != nil {
			return err
		}
		transform, err := s3.NewBucketObjectv2(ctx, "transform", &s3.BucketObjectv2Args{
			Bucket:  exampleBucket.Bucket,
			Key:     pulumi.String("algorithm/transform/input.csv"),
			Content: pulumi.String("1.0,0.0\n0.0,1.0\n"),
		})
		if err != nil {
			return err
		}
		_, err = sagemaker.NewAlgorithm(ctx, "example", &sagemaker.AlgorithmArgs{
			AlgorithmName: pulumi.String("example-validation-algorithm"),
			TrainingSpecification: &sagemaker.AlgorithmTrainingSpecificationArgs{
				TrainingImage: pulumi.String(example.RegistryPath),
				SupportedTrainingInstanceTypes: pulumi.StringArray{
					pulumi.String("ml.m5.large"),
				},
				// Each hyperparameter is pinned to a single valid value so the
				// validation training job is deterministic.
				SupportedHyperParameters: sagemaker.AlgorithmTrainingSpecificationSupportedHyperParameterArray{
					&sagemaker.AlgorithmTrainingSpecificationSupportedHyperParameterArgs{
						DefaultValue: pulumi.String("2"),
						Description:  pulumi.String("Feature dimension"),
						IsRequired:   pulumi.Bool(true),
						IsTunable:    pulumi.Bool(false),
						Name:         pulumi.String("feature_dim"),
						Type:         pulumi.String("Integer"),
						Range: &sagemaker.AlgorithmTrainingSpecificationSupportedHyperParameterRangeArgs{
							IntegerParameterRangeSpecification: &sagemaker.AlgorithmTrainingSpecificationSupportedHyperParameterRangeIntegerParameterRangeSpecificationArgs{
								MinValue: pulumi.String("2"),
								MaxValue: pulumi.String("2"),
							},
						},
					},
					&sagemaker.AlgorithmTrainingSpecificationSupportedHyperParameterArgs{
						DefaultValue: pulumi.String("4"),
						Description:  pulumi.String("Mini batch size"),
						IsRequired:   pulumi.Bool(true),
						IsTunable:    pulumi.Bool(false),
						Name:         pulumi.String("mini_batch_size"),
						Type:         pulumi.String("Integer"),
						Range: &sagemaker.AlgorithmTrainingSpecificationSupportedHyperParameterRangeArgs{
							IntegerParameterRangeSpecification: &sagemaker.AlgorithmTrainingSpecificationSupportedHyperParameterRangeIntegerParameterRangeSpecificationArgs{
								MinValue: pulumi.String("4"),
								MaxValue: pulumi.String("4"),
							},
						},
					},
					&sagemaker.AlgorithmTrainingSpecificationSupportedHyperParameterArgs{
						DefaultValue: pulumi.String("binary_classifier"),
						Description:  pulumi.String("Predictor type"),
						IsRequired:   pulumi.Bool(true),
						IsTunable:    pulumi.Bool(false),
						Name:         pulumi.String("predictor_type"),
						Type:         pulumi.String("Categorical"),
						Range: &sagemaker.AlgorithmTrainingSpecificationSupportedHyperParameterRangeArgs{
							CategoricalParameterRangeSpecification: &sagemaker.AlgorithmTrainingSpecificationSupportedHyperParameterRangeCategoricalParameterRangeSpecificationArgs{
								Values: pulumi.StringArray{
									pulumi.String("binary_classifier"),
								},
							},
						},
					},
				},
				TrainingChannels: sagemaker.AlgorithmTrainingSpecificationTrainingChannelArray{
					&sagemaker.AlgorithmTrainingSpecificationTrainingChannelArgs{
						Name: pulumi.String("train"),
						SupportedContentTypes: pulumi.StringArray{
							pulumi.String("text/csv"),
						},
						SupportedInputModes: pulumi.StringArray{
							pulumi.String("File"),
						},
					},
				},
			},
			InferenceSpecification: &sagemaker.AlgorithmInferenceSpecificationArgs{
				SupportedContentTypes: pulumi.StringArray{
					pulumi.String("text/csv"),
				},
				SupportedResponseMimeTypes: pulumi.StringArray{
					pulumi.String("text/csv"),
				},
				SupportedTransformInstanceTypes: pulumi.StringArray{
					pulumi.String("ml.m5.large"),
				},
				Containers: sagemaker.AlgorithmInferenceSpecificationContainerArray{
					&sagemaker.AlgorithmInferenceSpecificationContainerArgs{
						Image: pulumi.String(example.RegistryPath),
					},
				},
			},
			// SageMaker runs this profile (one training job, then one
			// transform job) before the algorithm is marked usable.
			ValidationSpecification: &sagemaker.AlgorithmValidationSpecificationArgs{
				ValidationRole: exampleRole.Arn,
				ValidationProfiles: &sagemaker.AlgorithmValidationSpecificationValidationProfilesArgs{
					ProfileName: pulumi.String("validation-profile"),
					TrainingJobDefinition: &sagemaker.AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionArgs{
						HyperParameters: pulumi.StringMap{
							"feature_dim":     pulumi.String("2"),
							"mini_batch_size": pulumi.String("4"),
							"predictor_type":  pulumi.String("binary_classifier"),
						},
						TrainingInputMode: pulumi.String("File"),
						InputDataConfigs: sagemaker.AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionInputDataConfigArray{
							&sagemaker.AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionInputDataConfigArgs{
								ChannelName:       pulumi.String("train"),
								CompressionType:   pulumi.String("None"),
								ContentType:       pulumi.String("text/csv"),
								InputMode:         pulumi.String("File"),
								RecordWrapperType: pulumi.String("None"),
								ShuffleConfig: &sagemaker.AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionInputDataConfigShuffleConfigArgs{
									Seed: pulumi.Int(1),
								},
								DataSource: &sagemaker.AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionInputDataConfigDataSourceArgs{
									S3DataSource: &sagemaker.AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionInputDataConfigDataSourceS3DataSourceArgs{
										AttributeNames: pulumi.StringArray{
											pulumi.String("label"),
										},
										S3DataDistributionType: pulumi.String("ShardedByS3Key"),
										S3DataType:             pulumi.String("S3Prefix"),
										S3Uri: exampleBucket.Bucket.ApplyT(func(bucket string) (string, error) {
											return fmt.Sprintf("s3://%v/algorithm/training/", bucket), nil
										}).(pulumi.StringOutput),
									},
								},
							},
						},
						OutputDataConfig: &sagemaker.AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionOutputDataConfigArgs{
							CompressionType: pulumi.String("GZIP"),
							S3OutputPath: exampleBucket.Bucket.ApplyT(func(bucket string) (string, error) {
								return fmt.Sprintf("s3://%v/algorithm/output", bucket), nil
							}).(pulumi.StringOutput),
						},
						ResourceConfig: &sagemaker.AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionResourceConfigArgs{
							InstanceCount:            pulumi.Int(1),
							InstanceType:             pulumi.String("ml.m5.large"),
							KeepAlivePeriodInSeconds: pulumi.Int(60),
							VolumeSizeInGb:           pulumi.Int(30),
						},
						StoppingCondition: &sagemaker.AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionStoppingConditionArgs{
							MaxPendingTimeInSeconds: pulumi.Int(7200),
							MaxRuntimeInSeconds:     pulumi.Int(1800),
							MaxWaitTimeInSeconds:    pulumi.Int(3600),
						},
					},
					TransformJobDefinition: &sagemaker.AlgorithmValidationSpecificationValidationProfilesTransformJobDefinitionArgs{
						BatchStrategy: pulumi.String("MultiRecord"),
						Environment: pulumi.StringMap{
							"Te": pulumi.String("enabled"),
						},
						MaxConcurrentTransforms: pulumi.Int(1),
						MaxPayloadInMb:          pulumi.Int(6),
						TransformInput: &sagemaker.AlgorithmValidationSpecificationValidationProfilesTransformJobDefinitionTransformInputArgs{
							CompressionType: pulumi.String("None"),
							ContentType:     pulumi.String("text/csv"),
							SplitType:       pulumi.String("Line"),
							DataSource: &sagemaker.AlgorithmValidationSpecificationValidationProfilesTransformJobDefinitionTransformInputDataSourceArgs{
								S3DataSource: &sagemaker.AlgorithmValidationSpecificationValidationProfilesTransformJobDefinitionTransformInputDataSourceS3DataSourceArgs{
									S3DataType: pulumi.String("S3Prefix"),
									S3Uri: exampleBucket.Bucket.ApplyT(func(bucket string) (string, error) {
										return fmt.Sprintf("s3://%v/algorithm/transform/", bucket), nil
									}).(pulumi.StringOutput),
								},
							},
						},
						// Pass a pointer like the sibling fields; the field
						// expects the *Args (PtrInput) form.
						TransformOutput: &sagemaker.AlgorithmValidationSpecificationValidationProfilesTransformJobDefinitionTransformOutputArgs{
							Accept:       pulumi.String("text/csv"),
							AssembleWith: pulumi.String("Line"),
							S3OutputPath: exampleBucket.Bucket.ApplyT(func(bucket string) (string, error) {
								return fmt.Sprintf("s3://%v/algorithm/transform-output", bucket), nil
							}).(pulumi.StringOutput),
						},
						TransformResources: &sagemaker.AlgorithmValidationSpecificationValidationProfilesTransformJobDefinitionTransformResourcesArgs{
							InstanceCount: pulumi.Int(1),
							InstanceType:  pulumi.String("ml.m5.large"),
						},
					},
				},
			},
		}, pulumi.DependsOn([]pulumi.Resource{
			// Validation runs immediately on create, so the role permissions
			// and the CSV fixtures must already exist.
			exampleRolePolicyAttachment,
			exampleRolePolicy,
			training,
			transform,
		}))
		if err != nil {
			return err
		}
		return nil
	})
}
// Validation Specification example: registers a SageMaker Algorithm whose
// training and batch-transform behavior SageMaker validates using the IAM
// role, S3 bucket, and sample CSV fixtures created below.
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Aws = Pulumi.Aws;

return await Deployment.RunAsync(() =>
{
    // Partition metadata (aws, aws-cn, aws-us-gov) keeps ARNs and service
    // principals correct in any partition.
    var current = Aws.GetPartition.Invoke();
    // Prebuilt first-party "linear-learner" image for the current region.
    var example = Aws.Sagemaker.GetPrebuiltEcrImage.Invoke(new()
    {
        RepositoryName = "linear-learner",
        ImageTag = "1",
    });
    // Trust policy letting the SageMaker service assume the validation role.
    var assumeRole = Aws.Iam.GetPolicyDocument.Invoke(new()
    {
        Statements = new[]
        {
            new Aws.Iam.Inputs.GetPolicyDocumentStatementInputArgs
            {
                Actions = new[]
                {
                    "sts:AssumeRole",
                },
                Principals = new[]
                {
                    new Aws.Iam.Inputs.GetPolicyDocumentStatementPrincipalInputArgs
                    {
                        Type = "Service",
                        Identifiers = new[]
                        {
                            // Interpolating an Output<T> directly into a string
                            // renders the type name, not the value; lift it
                            // with Apply instead.
                            current.Apply(getPartitionResult => $"sagemaker.{getPartitionResult.DnsSuffix}"),
                        },
                    },
                },
            },
        },
    });
    var exampleRole = new Aws.Iam.Role("example", new()
    {
        Name = "example-sagemaker-algorithm-role",
        AssumeRolePolicy = assumeRole.Apply(getPolicyDocumentResult => getPolicyDocumentResult.Json),
    });
    var exampleRolePolicyAttachment = new Aws.Iam.RolePolicyAttachment("example", new()
    {
        Role = exampleRole.Name,
        // Lift the partition out of the Output before formatting the ARN.
        PolicyArn = current.Apply(getPartitionResult => $"arn:{getPartitionResult.Partition}:iam::aws:policy/AmazonSageMakerFullAccess"),
    });
    // Bucket holding the training/transform inputs and the validation job output.
    var exampleBucket = new Aws.S3.Bucket("example", new()
    {
        BucketName = "example-sagemaker-algorithm-validation-bucket",
        ForceDestroy = true,
    });
    // Least-privilege S3 access for the validation jobs (bucket + objects).
    var s3Access = Aws.Iam.GetPolicyDocument.Invoke(new()
    {
        Statements = new[]
        {
            new Aws.Iam.Inputs.GetPolicyDocumentStatementInputArgs
            {
                Effect = "Allow",
                Actions = new[]
                {
                    "s3:GetBucketLocation",
                    "s3:ListBucket",
                    "s3:GetObject",
                    "s3:PutObject",
                },
                Resources = new[]
                {
                    exampleBucket.Arn,
                    // Apply keeps both array elements Output<string>.
                    exampleBucket.Arn.Apply(arn => $"{arn}/*"),
                },
            },
        },
    });
    var exampleRolePolicy = new Aws.Iam.RolePolicy("example", new()
    {
        Role = exampleRole.Name,
        Policy = s3Access.Apply(getPolicyDocumentResult => getPolicyDocumentResult.Json),
    });
    // Tiny CSV fixtures: label + two features per row for training, two
    // features per row for the batch transform input.
    var training = new Aws.S3.BucketObjectv2("training", new()
    {
        Bucket = exampleBucket.BucketName,
        Key = "algorithm/training/data.csv",
        Content = @"1,1.0,0.0
0,0.0,1.0
1,1.0,1.0
0,0.0,0.0
",
    });
    var transform = new Aws.S3.BucketObjectv2("transform", new()
    {
        Bucket = exampleBucket.BucketName,
        Key = "algorithm/transform/input.csv",
        Content = @"1.0,0.0
0.0,1.0
",
    });
    var exampleAlgorithm = new Aws.Sagemaker.Algorithm("example", new()
    {
        AlgorithmName = "example-validation-algorithm",
        TrainingSpecification = new Aws.Sagemaker.Inputs.AlgorithmTrainingSpecificationArgs
        {
            TrainingImage = example.Apply(getPrebuiltEcrImageResult => getPrebuiltEcrImageResult.RegistryPath),
            SupportedTrainingInstanceTypes = new[]
            {
                "ml.m5.large",
            },
            // Each hyperparameter is pinned to a single valid value so the
            // validation training job is deterministic.
            SupportedHyperParameters = new[]
            {
                new Aws.Sagemaker.Inputs.AlgorithmTrainingSpecificationSupportedHyperParameterArgs
                {
                    DefaultValue = "2",
                    Description = "Feature dimension",
                    IsRequired = true,
                    IsTunable = false,
                    Name = "feature_dim",
                    Type = "Integer",
                    Range = new Aws.Sagemaker.Inputs.AlgorithmTrainingSpecificationSupportedHyperParameterRangeArgs
                    {
                        IntegerParameterRangeSpecification = new Aws.Sagemaker.Inputs.AlgorithmTrainingSpecificationSupportedHyperParameterRangeIntegerParameterRangeSpecificationArgs
                        {
                            MinValue = "2",
                            MaxValue = "2",
                        },
                    },
                },
                new Aws.Sagemaker.Inputs.AlgorithmTrainingSpecificationSupportedHyperParameterArgs
                {
                    DefaultValue = "4",
                    Description = "Mini batch size",
                    IsRequired = true,
                    IsTunable = false,
                    Name = "mini_batch_size",
                    Type = "Integer",
                    Range = new Aws.Sagemaker.Inputs.AlgorithmTrainingSpecificationSupportedHyperParameterRangeArgs
                    {
                        IntegerParameterRangeSpecification = new Aws.Sagemaker.Inputs.AlgorithmTrainingSpecificationSupportedHyperParameterRangeIntegerParameterRangeSpecificationArgs
                        {
                            MinValue = "4",
                            MaxValue = "4",
                        },
                    },
                },
                new Aws.Sagemaker.Inputs.AlgorithmTrainingSpecificationSupportedHyperParameterArgs
                {
                    DefaultValue = "binary_classifier",
                    Description = "Predictor type",
                    IsRequired = true,
                    IsTunable = false,
                    Name = "predictor_type",
                    Type = "Categorical",
                    Range = new Aws.Sagemaker.Inputs.AlgorithmTrainingSpecificationSupportedHyperParameterRangeArgs
                    {
                        CategoricalParameterRangeSpecification = new Aws.Sagemaker.Inputs.AlgorithmTrainingSpecificationSupportedHyperParameterRangeCategoricalParameterRangeSpecificationArgs
                        {
                            Values = new[]
                            {
                                "binary_classifier",
                            },
                        },
                    },
                },
            },
            TrainingChannels = new[]
            {
                new Aws.Sagemaker.Inputs.AlgorithmTrainingSpecificationTrainingChannelArgs
                {
                    Name = "train",
                    SupportedContentTypes = new[]
                    {
                        "text/csv",
                    },
                    SupportedInputModes = new[]
                    {
                        "File",
                    },
                },
            },
        },
        InferenceSpecification = new Aws.Sagemaker.Inputs.AlgorithmInferenceSpecificationArgs
        {
            SupportedContentTypes = new[]
            {
                "text/csv",
            },
            SupportedResponseMimeTypes = new[]
            {
                "text/csv",
            },
            SupportedTransformInstanceTypes = new[]
            {
                "ml.m5.large",
            },
            Containers = new[]
            {
                new Aws.Sagemaker.Inputs.AlgorithmInferenceSpecificationContainerArgs
                {
                    Image = example.Apply(getPrebuiltEcrImageResult => getPrebuiltEcrImageResult.RegistryPath),
                },
            },
        },
        // SageMaker runs this profile (one training job, then one transform
        // job) before the algorithm is marked usable.
        ValidationSpecification = new Aws.Sagemaker.Inputs.AlgorithmValidationSpecificationArgs
        {
            ValidationRole = exampleRole.Arn,
            ValidationProfiles = new Aws.Sagemaker.Inputs.AlgorithmValidationSpecificationValidationProfilesArgs
            {
                ProfileName = "validation-profile",
                TrainingJobDefinition = new Aws.Sagemaker.Inputs.AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionArgs
                {
                    HyperParameters =
                    {
                        { "feature_dim", "2" },
                        { "mini_batch_size", "4" },
                        { "predictor_type", "binary_classifier" },
                    },
                    TrainingInputMode = "File",
                    InputDataConfigs = new[]
                    {
                        new Aws.Sagemaker.Inputs.AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionInputDataConfigArgs
                        {
                            ChannelName = "train",
                            CompressionType = "None",
                            ContentType = "text/csv",
                            InputMode = "File",
                            RecordWrapperType = "None",
                            ShuffleConfig = new Aws.Sagemaker.Inputs.AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionInputDataConfigShuffleConfigArgs
                            {
                                Seed = 1,
                            },
                            DataSource = new Aws.Sagemaker.Inputs.AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionInputDataConfigDataSourceArgs
                            {
                                S3DataSource = new Aws.Sagemaker.Inputs.AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionInputDataConfigDataSourceS3DataSourceArgs
                                {
                                    AttributeNames = new[]
                                    {
                                        "label",
                                    },
                                    S3DataDistributionType = "ShardedByS3Key",
                                    S3DataType = "S3Prefix",
                                    S3Uri = exampleBucket.BucketName.Apply(bucket => $"s3://{bucket}/algorithm/training/"),
                                },
                            },
                        },
                    },
                    OutputDataConfig = new Aws.Sagemaker.Inputs.AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionOutputDataConfigArgs
                    {
                        CompressionType = "GZIP",
                        S3OutputPath = exampleBucket.BucketName.Apply(bucket => $"s3://{bucket}/algorithm/output"),
                    },
                    ResourceConfig = new Aws.Sagemaker.Inputs.AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionResourceConfigArgs
                    {
                        InstanceCount = 1,
                        InstanceType = "ml.m5.large",
                        KeepAlivePeriodInSeconds = 60,
                        VolumeSizeInGb = 30,
                    },
                    StoppingCondition = new Aws.Sagemaker.Inputs.AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionStoppingConditionArgs
                    {
                        MaxPendingTimeInSeconds = 7200,
                        MaxRuntimeInSeconds = 1800,
                        MaxWaitTimeInSeconds = 3600,
                    },
                },
                TransformJobDefinition = new Aws.Sagemaker.Inputs.AlgorithmValidationSpecificationValidationProfilesTransformJobDefinitionArgs
                {
                    BatchStrategy = "MultiRecord",
                    Environment =
                    {
                        { "Te", "enabled" },
                    },
                    MaxConcurrentTransforms = 1,
                    MaxPayloadInMb = 6,
                    TransformInput = new Aws.Sagemaker.Inputs.AlgorithmValidationSpecificationValidationProfilesTransformJobDefinitionTransformInputArgs
                    {
                        CompressionType = "None",
                        ContentType = "text/csv",
                        SplitType = "Line",
                        DataSource = new Aws.Sagemaker.Inputs.AlgorithmValidationSpecificationValidationProfilesTransformJobDefinitionTransformInputDataSourceArgs
                        {
                            S3DataSource = new Aws.Sagemaker.Inputs.AlgorithmValidationSpecificationValidationProfilesTransformJobDefinitionTransformInputDataSourceS3DataSourceArgs
                            {
                                S3DataType = "S3Prefix",
                                S3Uri = exampleBucket.BucketName.Apply(bucket => $"s3://{bucket}/algorithm/transform/"),
                            },
                        },
                    },
                    TransformOutput = new Aws.Sagemaker.Inputs.AlgorithmValidationSpecificationValidationProfilesTransformJobDefinitionTransformOutputArgs
                    {
                        Accept = "text/csv",
                        AssembleWith = "Line",
                        S3OutputPath = exampleBucket.BucketName.Apply(bucket => $"s3://{bucket}/algorithm/transform-output"),
                    },
                    TransformResources = new Aws.Sagemaker.Inputs.AlgorithmValidationSpecificationValidationProfilesTransformJobDefinitionTransformResourcesArgs
                    {
                        InstanceCount = 1,
                        InstanceType = "ml.m5.large",
                    },
                },
            },
        },
    }, new CustomResourceOptions
    {
        // Validation runs immediately on create, so the role permissions and
        // the CSV fixtures must already exist.
        DependsOn =
        {
            exampleRolePolicyAttachment,
            exampleRolePolicy,
            training,
            transform,
        },
    });
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.aws.AwsFunctions;
import com.pulumi.aws.inputs.GetPartitionArgs;
import com.pulumi.aws.sagemaker.SagemakerFunctions;
import com.pulumi.aws.sagemaker.inputs.GetPrebuiltEcrImageArgs;
import com.pulumi.aws.iam.IamFunctions;
import com.pulumi.aws.iam.inputs.GetPolicyDocumentArgs;
import com.pulumi.aws.iam.Role;
import com.pulumi.aws.iam.RoleArgs;
import com.pulumi.aws.iam.RolePolicyAttachment;
import com.pulumi.aws.iam.RolePolicyAttachmentArgs;
import com.pulumi.aws.s3.Bucket;
import com.pulumi.aws.s3.BucketArgs;
import com.pulumi.aws.iam.RolePolicy;
import com.pulumi.aws.iam.RolePolicyArgs;
import com.pulumi.aws.s3.BucketObjectv2;
import com.pulumi.aws.s3.BucketObjectv2Args;
import com.pulumi.aws.sagemaker.Algorithm;
import com.pulumi.aws.sagemaker.AlgorithmArgs;
import com.pulumi.aws.sagemaker.inputs.AlgorithmTrainingSpecificationArgs;
import com.pulumi.aws.sagemaker.inputs.AlgorithmInferenceSpecificationArgs;
import com.pulumi.aws.sagemaker.inputs.AlgorithmValidationSpecificationArgs;
import com.pulumi.aws.sagemaker.inputs.AlgorithmValidationSpecificationValidationProfilesArgs;
import com.pulumi.aws.sagemaker.inputs.AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionArgs;
import com.pulumi.aws.sagemaker.inputs.AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionOutputDataConfigArgs;
import com.pulumi.aws.sagemaker.inputs.AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionResourceConfigArgs;
import com.pulumi.aws.sagemaker.inputs.AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionStoppingConditionArgs;
import com.pulumi.aws.sagemaker.inputs.AlgorithmValidationSpecificationValidationProfilesTransformJobDefinitionArgs;
import com.pulumi.aws.sagemaker.inputs.AlgorithmValidationSpecificationValidationProfilesTransformJobDefinitionTransformInputArgs;
import com.pulumi.aws.sagemaker.inputs.AlgorithmValidationSpecificationValidationProfilesTransformJobDefinitionTransformInputDataSourceArgs;
import com.pulumi.aws.sagemaker.inputs.AlgorithmValidationSpecificationValidationProfilesTransformJobDefinitionTransformInputDataSourceS3DataSourceArgs;
import com.pulumi.aws.sagemaker.inputs.AlgorithmValidationSpecificationValidationProfilesTransformJobDefinitionTransformOutputArgs;
import com.pulumi.aws.sagemaker.inputs.AlgorithmValidationSpecificationValidationProfilesTransformJobDefinitionTransformResourcesArgs;
import com.pulumi.resources.CustomResourceOptions;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
final var current = AwsFunctions.getPartition(GetPartitionArgs.builder()
.build());
// Look up the prebuilt SageMaker "linear-learner" ECR image (tag 1); used as both
// the training and inference image for the algorithm below.
final var example = SagemakerFunctions.getPrebuiltEcrImage(GetPrebuiltEcrImageArgs.builder()
    .repositoryName("linear-learner")
    .imageTag("1")
    .build());

// Trust policy letting the SageMaker service principal assume the execution role.
// NOTE(review): `current` is an AWS partition lookup declared earlier in this program
// (see the `aws:getPartition` variable in the YAML rendering of this example).
final var assumeRole = IamFunctions.getPolicyDocument(GetPolicyDocumentArgs.builder()
    .statements(GetPolicyDocumentStatementArgs.builder()
        .actions("sts:AssumeRole")
        .principals(GetPolicyDocumentStatementPrincipalArgs.builder()
            .type("Service")
            .identifiers(String.format("sagemaker.%s", current.dnsSuffix()))
            .build())
        .build())
    .build());

// Execution role SageMaker assumes while running the validation jobs.
var exampleRole = new Role("exampleRole", RoleArgs.builder()
    .name("example-sagemaker-algorithm-role")
    .assumeRolePolicy(assumeRole.json())
    .build());

// Attach the AWS-managed SageMaker full-access policy to the role.
var exampleRolePolicyAttachment = new RolePolicyAttachment("exampleRolePolicyAttachment", RolePolicyAttachmentArgs.builder()
    .role(exampleRole.name())
    .policyArn(String.format("arn:%s:iam::aws:policy/AmazonSageMakerFullAccess", current.partition()))
    .build());

// Bucket holding the validation input fixtures and job outputs. forceDestroy lets
// the example be torn down even while objects remain in the bucket.
var exampleBucket = new Bucket("exampleBucket", BucketArgs.builder()
    .bucket("example-sagemaker-algorithm-validation-bucket")
    .forceDestroy(true)
    .build());

// Inline policy granting the role list/read/write access to the bucket and its keys.
final var s3Access = IamFunctions.getPolicyDocument(GetPolicyDocumentArgs.builder()
    .statements(GetPolicyDocumentStatementArgs.builder()
        .effect("Allow")
        .actions(
            "s3:GetBucketLocation",
            "s3:ListBucket",
            "s3:GetObject",
            "s3:PutObject")
        .resources(
            exampleBucket.arn(),
            exampleBucket.arn().applyValue(_arn -> String.format("%s/*", _arn)))
        .build())
    .build());

var exampleRolePolicy = new RolePolicy("exampleRolePolicy", RolePolicyArgs.builder()
    .role(exampleRole.name())
    .policy(s3Access.applyValue(_s3Access -> _s3Access.json()))
    .build());

// Tiny CSV training fixture (label,feature1,feature2 per row) for the validation
// training job.
var training = new BucketObjectv2("training", BucketObjectv2Args.builder()
    .bucket(exampleBucket.bucket())
    .key("algorithm/training/data.csv")
    .content("""
        1,1.0,0.0
        0,0.0,1.0
        1,1.0,1.0
        0,0.0,0.0
        """)
    .build());

// Tiny CSV input fixture for the validation transform job.
var transform = new BucketObjectv2("transform", BucketObjectv2Args.builder()
    .bucket(exampleBucket.bucket())
    .key("algorithm/transform/input.csv")
    .content("""
        1.0,0.0
        0.0,1.0
        """)
    .build());

// The algorithm itself: training/inference specs for the linear-learner image plus a
// validation profile that runs one training job and one transform job against the
// fixtures above.
var exampleAlgorithm = new Algorithm("exampleAlgorithm", AlgorithmArgs.builder()
    .algorithmName("example-validation-algorithm")
    .trainingSpecification(AlgorithmTrainingSpecificationArgs.builder()
        .trainingImage(example.registryPath())
        .supportedTrainingInstanceTypes("ml.m5.large")
        // Hyper-parameters are pinned (min == max, isTunable false) so the
        // validation training job uses exactly the values supplied below.
        .supportedHyperParameters(
            AlgorithmTrainingSpecificationSupportedHyperParameterArgs.builder()
                .defaultValue("2")
                .description("Feature dimension")
                .isRequired(true)
                .isTunable(false)
                .name("feature_dim")
                .type("Integer")
                .range(AlgorithmTrainingSpecificationSupportedHyperParameterRangeArgs.builder()
                    .integerParameterRangeSpecification(AlgorithmTrainingSpecificationSupportedHyperParameterRangeIntegerParameterRangeSpecificationArgs.builder()
                        .minValue("2")
                        .maxValue("2")
                        .build())
                    .build())
                .build(),
            AlgorithmTrainingSpecificationSupportedHyperParameterArgs.builder()
                .defaultValue("4")
                .description("Mini batch size")
                .isRequired(true)
                .isTunable(false)
                .name("mini_batch_size")
                .type("Integer")
                .range(AlgorithmTrainingSpecificationSupportedHyperParameterRangeArgs.builder()
                    .integerParameterRangeSpecification(AlgorithmTrainingSpecificationSupportedHyperParameterRangeIntegerParameterRangeSpecificationArgs.builder()
                        .minValue("4")
                        .maxValue("4")
                        .build())
                    .build())
                .build(),
            AlgorithmTrainingSpecificationSupportedHyperParameterArgs.builder()
                .defaultValue("binary_classifier")
                .description("Predictor type")
                .isRequired(true)
                .isTunable(false)
                .name("predictor_type")
                .type("Categorical")
                .range(AlgorithmTrainingSpecificationSupportedHyperParameterRangeArgs.builder()
                    .categoricalParameterRangeSpecification(AlgorithmTrainingSpecificationSupportedHyperParameterRangeCategoricalParameterRangeSpecificationArgs.builder()
                        .values("binary_classifier")
                        .build())
                    .build())
                .build())
        .trainingChannels(AlgorithmTrainingSpecificationTrainingChannelArgs.builder()
            .name("train")
            .supportedContentTypes("text/csv")
            .supportedInputModes("File")
            .build())
        .build())
    .inferenceSpecification(AlgorithmInferenceSpecificationArgs.builder()
        .supportedContentTypes("text/csv")
        .supportedResponseMimeTypes("text/csv")
        .supportedTransformInstanceTypes("ml.m5.large")
        .containers(AlgorithmInferenceSpecificationContainerArgs.builder()
            .image(example.registryPath())
            .build())
        .build())
    .validationSpecification(AlgorithmValidationSpecificationArgs.builder()
        .validationRole(exampleRole.arn())
        .validationProfiles(AlgorithmValidationSpecificationValidationProfilesArgs.builder()
            .profileName("validation-profile")
            // Training job: consumes the "train" channel from s3://<bucket>/algorithm/training/.
            .trainingJobDefinition(AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionArgs.builder()
                .hyperParameters(Map.ofEntries(
                    Map.entry("feature_dim", "2"),
                    Map.entry("mini_batch_size", "4"),
                    Map.entry("predictor_type", "binary_classifier")
                ))
                .trainingInputMode("File")
                .inputDataConfigs(AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionInputDataConfigArgs.builder()
                    .channelName("train")
                    .compressionType("None")
                    .contentType("text/csv")
                    .inputMode("File")
                    .recordWrapperType("None")
                    .shuffleConfig(AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionInputDataConfigShuffleConfigArgs.builder()
                        .seed(1)
                        .build())
                    .dataSource(AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionInputDataConfigDataSourceArgs.builder()
                        .s3DataSource(AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionInputDataConfigDataSourceS3DataSourceArgs.builder()
                            .attributeNames("label")
                            .s3DataDistributionType("ShardedByS3Key")
                            .s3DataType("S3Prefix")
                            .s3Uri(exampleBucket.bucket().applyValue(_bucket -> String.format("s3://%s/algorithm/training/", _bucket)))
                            .build())
                        .build())
                    .build())
                .outputDataConfig(AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionOutputDataConfigArgs.builder()
                    .compressionType("GZIP")
                    .s3OutputPath(exampleBucket.bucket().applyValue(_bucket -> String.format("s3://%s/algorithm/output", _bucket)))
                    .build())
                .resourceConfig(AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionResourceConfigArgs.builder()
                    .instanceCount(1)
                    .instanceType("ml.m5.large")
                    .keepAlivePeriodInSeconds(60)
                    .volumeSizeInGb(30)
                    .build())
                .stoppingCondition(AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionStoppingConditionArgs.builder()
                    .maxPendingTimeInSeconds(7200)
                    .maxRuntimeInSeconds(1800)
                    .maxWaitTimeInSeconds(3600)
                    .build())
                .build())
            // Transform job: consumes s3://<bucket>/algorithm/transform/ and writes
            // results to .../transform-output.
            .transformJobDefinition(AlgorithmValidationSpecificationValidationProfilesTransformJobDefinitionArgs.builder()
                .batchStrategy("MultiRecord")
                .environment(Map.of("Te", "enabled"))
                .maxConcurrentTransforms(1)
                .maxPayloadInMb(6)
                .transformInput(AlgorithmValidationSpecificationValidationProfilesTransformJobDefinitionTransformInputArgs.builder()
                    .compressionType("None")
                    .contentType("text/csv")
                    .splitType("Line")
                    .dataSource(AlgorithmValidationSpecificationValidationProfilesTransformJobDefinitionTransformInputDataSourceArgs.builder()
                        .s3DataSource(AlgorithmValidationSpecificationValidationProfilesTransformJobDefinitionTransformInputDataSourceS3DataSourceArgs.builder()
                            .s3DataType("S3Prefix")
                            .s3Uri(exampleBucket.bucket().applyValue(_bucket -> String.format("s3://%s/algorithm/transform/", _bucket)))
                            .build())
                        .build())
                    .build())
                .transformOutput(AlgorithmValidationSpecificationValidationProfilesTransformJobDefinitionTransformOutputArgs.builder()
                    .accept("text/csv")
                    .assembleWith("Line")
                    .s3OutputPath(exampleBucket.bucket().applyValue(_bucket -> String.format("s3://%s/algorithm/transform-output", _bucket)))
                    .build())
                .transformResources(AlgorithmValidationSpecificationValidationProfilesTransformJobDefinitionTransformResourcesArgs.builder()
                    .instanceCount(1)
                    .instanceType("ml.m5.large")
                    .build())
                .build())
            .build())
        .build(), CustomResourceOptions.builder()
    // Explicit ordering: the role must have its policies and the fixtures must be
    // uploaded before AWS runs the validation jobs at algorithm creation time.
    .dependsOn(
        exampleRolePolicyAttachment,
        exampleRolePolicy,
        training,
        transform)
    .build());
}
}
# Pulumi YAML rendering of the validation example: registers a SageMaker Algorithm
# built on the prebuilt linear-learner image, with an execution role, an S3 bucket
# of CSV fixtures, and a validation profile (training + transform jobs).
# NOTE(review): indentation was lost in extraction; nesting reconstructed from the
# parallel Java/C#/Go examples of the same program on this page.
resources:
  # Execution role SageMaker assumes for the validation jobs.
  exampleRole:
    type: aws:iam:Role
    name: example
    properties:
      name: example-sagemaker-algorithm-role
      assumeRolePolicy: ${assumeRole.json}
  exampleRolePolicyAttachment:
    type: aws:iam:RolePolicyAttachment
    name: example
    properties:
      role: ${exampleRole.name}
      policyArn: arn:${current.partition}:iam::aws:policy/AmazonSageMakerFullAccess
  # Bucket for validation inputs/outputs; forceDestroy allows teardown with objects present.
  exampleBucket:
    type: aws:s3:Bucket
    name: example
    properties:
      bucket: example-sagemaker-algorithm-validation-bucket
      forceDestroy: true
  exampleRolePolicy:
    type: aws:iam:RolePolicy
    name: example
    properties:
      role: ${exampleRole.name}
      policy: ${s3Access.json}
  # Tiny CSV fixtures consumed by the validation training/transform jobs.
  training:
    type: aws:s3:BucketObjectv2
    properties:
      bucket: ${exampleBucket.bucket}
      key: algorithm/training/data.csv
      content: |
        1,1.0,0.0
        0,0.0,1.0
        1,1.0,1.0
        0,0.0,0.0
  transform:
    type: aws:s3:BucketObjectv2
    properties:
      bucket: ${exampleBucket.bucket}
      key: algorithm/transform/input.csv
      content: |
        1.0,0.0
        0.0,1.0
  exampleAlgorithm:
    type: aws:sagemaker:Algorithm
    name: example
    properties:
      algorithmName: example-validation-algorithm
      trainingSpecification:
        trainingImage: ${example.registryPath}
        supportedTrainingInstanceTypes:
          - ml.m5.large
        # Hyper-parameters pinned (min == max, isTunable false) so the validation
        # training job uses exactly the values given under hyperParameters below.
        supportedHyperParameters:
          - defaultValue: '2'
            description: Feature dimension
            isRequired: true
            isTunable: false
            name: feature_dim
            type: Integer
            range:
              integerParameterRangeSpecification:
                minValue: '2'
                maxValue: '2'
          - defaultValue: '4'
            description: Mini batch size
            isRequired: true
            isTunable: false
            name: mini_batch_size
            type: Integer
            range:
              integerParameterRangeSpecification:
                minValue: '4'
                maxValue: '4'
          - defaultValue: binary_classifier
            description: Predictor type
            isRequired: true
            isTunable: false
            name: predictor_type
            type: Categorical
            range:
              categoricalParameterRangeSpecification:
                values:
                  - binary_classifier
        trainingChannels:
          - name: train
            supportedContentTypes:
              - text/csv
            supportedInputModes:
              - File
      inferenceSpecification:
        supportedContentTypes:
          - text/csv
        supportedResponseMimeTypes:
          - text/csv
        supportedTransformInstanceTypes:
          - ml.m5.large
        containers:
          - image: ${example.registryPath}
      validationSpecification:
        validationRole: ${exampleRole.arn}
        validationProfiles:
          profileName: validation-profile
          trainingJobDefinition:
            hyperParameters:
              feature_dim: '2'
              mini_batch_size: '4'
              predictor_type: binary_classifier
            trainingInputMode: File
            inputDataConfigs:
              - channelName: train
                compressionType: None
                contentType: text/csv
                inputMode: File
                recordWrapperType: None
                shuffleConfig:
                  seed: 1
                dataSource:
                  s3DataSource:
                    attributeNames:
                      - label
                    s3DataDistributionType: ShardedByS3Key
                    s3DataType: S3Prefix
                    s3Uri: s3://${exampleBucket.bucket}/algorithm/training/
            outputDataConfig:
              compressionType: GZIP
              s3OutputPath: s3://${exampleBucket.bucket}/algorithm/output
            resourceConfig:
              instanceCount: 1
              instanceType: ml.m5.large
              keepAlivePeriodInSeconds: 60
              volumeSizeInGb: 30
            stoppingCondition:
              maxPendingTimeInSeconds: 7200
              maxRuntimeInSeconds: 1800
              maxWaitTimeInSeconds: 3600
          transformJobDefinition:
            batchStrategy: MultiRecord
            environment:
              Te: enabled
            maxConcurrentTransforms: 1
            maxPayloadInMb: 6
            transformInput:
              compressionType: None
              contentType: text/csv
              splitType: Line
              dataSource:
                s3DataSource:
                  s3DataType: S3Prefix
                  s3Uri: s3://${exampleBucket.bucket}/algorithm/transform/
            transformOutput:
              accept: text/csv
              assembleWith: Line
              s3OutputPath: s3://${exampleBucket.bucket}/algorithm/transform-output
            transformResources:
              instanceCount: 1
              instanceType: ml.m5.large
    # Policies and fixtures must exist before AWS runs the validation jobs at create time.
    options:
      dependsOn:
        - ${exampleRolePolicyAttachment}
        - ${exampleRolePolicy}
        - ${training}
        - ${transform}
variables:
  # AWS partition/DNS-suffix lookup used to build partition-agnostic ARNs and principals.
  current:
    fn::invoke:
      function: aws:getPartition
      arguments: {}
  # Prebuilt linear-learner image (tag 1) used for both training and inference.
  example:
    fn::invoke:
      function: aws:sagemaker:getPrebuiltEcrImage
      arguments:
        repositoryName: linear-learner
        imageTag: '1'
  # Trust policy letting the SageMaker service principal assume the role.
  assumeRole:
    fn::invoke:
      function: aws:iam:getPolicyDocument
      arguments:
        statements:
          - actions:
              - sts:AssumeRole
            principals:
              - type: Service
                identifiers:
                  - sagemaker.${current.dnsSuffix}
  # Bucket read/write policy for the validation jobs.
  s3Access:
    fn::invoke:
      function: aws:iam:getPolicyDocument
      arguments:
        statements:
          - effect: Allow
            actions:
              - s3:GetBucketLocation
              - s3:ListBucket
              - s3:GetObject
              - s3:PutObject
            resources:
              - ${exampleBucket.arn}
              - ${exampleBucket.arn}/*
Create Algorithm Resource
Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.
Constructor syntax
new Algorithm(name: string, args: AlgorithmArgs, opts?: CustomResourceOptions);
@overload
def Algorithm(resource_name: str,
args: AlgorithmArgs,
opts: Optional[ResourceOptions] = None)
@overload
def Algorithm(resource_name: str,
opts: Optional[ResourceOptions] = None,
algorithm_name: Optional[str] = None,
training_specification: Optional[AlgorithmTrainingSpecificationArgs] = None,
algorithm_description: Optional[str] = None,
certify_for_marketplace: Optional[bool] = None,
inference_specification: Optional[AlgorithmInferenceSpecificationArgs] = None,
region: Optional[str] = None,
tags: Optional[Mapping[str, str]] = None,
timeouts: Optional[AlgorithmTimeoutsArgs] = None,
validation_specification: Optional[AlgorithmValidationSpecificationArgs] = None)
func NewAlgorithm(ctx *Context, name string, args AlgorithmArgs, opts ...ResourceOption) (*Algorithm, error)
public Algorithm(string name, AlgorithmArgs args, CustomResourceOptions? opts = null)
public Algorithm(String name, AlgorithmArgs args)
public Algorithm(String name, AlgorithmArgs args, CustomResourceOptions options)
type: aws:sagemaker:Algorithm
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
Parameters
- name string
- The unique name of the resource.
- args AlgorithmArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- resource_name str
- The unique name of the resource.
- args AlgorithmArgs
- The arguments to resource properties.
- opts ResourceOptions
- Bag of options to control resource's behavior.
- ctx Context
- Context object for the current deployment.
- name string
- The unique name of the resource.
- args AlgorithmArgs
- The arguments to resource properties.
- opts ResourceOption
- Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args AlgorithmArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- name String
- The unique name of the resource.
- args AlgorithmArgs
- The arguments to resource properties.
- options CustomResourceOptions
- Bag of options to control resource's behavior.
Constructor example
The following reference example uses placeholder values for all input properties.
// Reference example: placeholder values for every Algorithm input property.
var algorithmResource = new Aws.Sagemaker.Algorithm("algorithmResource", new()
{
    AlgorithmName = "string",
    TrainingSpecification = new Aws.Sagemaker.Inputs.AlgorithmTrainingSpecificationArgs
    {
        SupportedTrainingInstanceTypes = new[]
        {
            "string",
        },
        TrainingChannels = new[]
        {
            new Aws.Sagemaker.Inputs.AlgorithmTrainingSpecificationTrainingChannelArgs
            {
                Name = "string",
                SupportedContentTypes = new[]
                {
                    "string",
                },
                SupportedInputModes = new[]
                {
                    "string",
                },
                Description = "string",
                IsRequired = false,
                SupportedCompressionTypes = new[]
                {
                    "string",
                },
            },
        },
        TrainingImage = "string",
        AdditionalS3DataSource = new Aws.Sagemaker.Inputs.AlgorithmTrainingSpecificationAdditionalS3DataSourceArgs
        {
            S3DataType = "string",
            S3Uri = "string",
            CompressionType = "string",
            Etag = "string",
        },
        MetricDefinitions = new[]
        {
            new Aws.Sagemaker.Inputs.AlgorithmTrainingSpecificationMetricDefinitionArgs
            {
                Name = "string",
                Regex = "string",
            },
        },
        SupportedHyperParameters = new[]
        {
            new Aws.Sagemaker.Inputs.AlgorithmTrainingSpecificationSupportedHyperParameterArgs
            {
                Name = "string",
                Type = "string",
                DefaultValue = "string",
                Description = "string",
                IsRequired = false,
                IsTunable = false,
                Range = new Aws.Sagemaker.Inputs.AlgorithmTrainingSpecificationSupportedHyperParameterRangeArgs
                {
                    CategoricalParameterRangeSpecification = new Aws.Sagemaker.Inputs.AlgorithmTrainingSpecificationSupportedHyperParameterRangeCategoricalParameterRangeSpecificationArgs
                    {
                        Values = new[]
                        {
                            "string",
                        },
                    },
                    ContinuousParameterRangeSpecification = new Aws.Sagemaker.Inputs.AlgorithmTrainingSpecificationSupportedHyperParameterRangeContinuousParameterRangeSpecificationArgs
                    {
                        MaxValue = "string",
                        MinValue = "string",
                    },
                    IntegerParameterRangeSpecification = new Aws.Sagemaker.Inputs.AlgorithmTrainingSpecificationSupportedHyperParameterRangeIntegerParameterRangeSpecificationArgs
                    {
                        MaxValue = "string",
                        MinValue = "string",
                    },
                },
            },
        },
        SupportedTuningJobObjectiveMetrics = new[]
        {
            new Aws.Sagemaker.Inputs.AlgorithmTrainingSpecificationSupportedTuningJobObjectiveMetricArgs
            {
                MetricName = "string",
                Type = "string",
            },
        },
        SupportsDistributedTraining = false,
        TrainingImageDigest = "string",
    },
    AlgorithmDescription = "string",
    CertifyForMarketplace = false,
    InferenceSpecification = new Aws.Sagemaker.Inputs.AlgorithmInferenceSpecificationArgs
    {
        Containers = new[]
        {
            new Aws.Sagemaker.Inputs.AlgorithmInferenceSpecificationContainerArgs
            {
                AdditionalS3DataSource = new Aws.Sagemaker.Inputs.AlgorithmInferenceSpecificationContainerAdditionalS3DataSourceArgs
                {
                    S3DataType = "string",
                    S3Uri = "string",
                    CompressionType = "string",
                    Etag = "string",
                },
                BaseModel = new Aws.Sagemaker.Inputs.AlgorithmInferenceSpecificationContainerBaseModelArgs
                {
                    HubContentName = "string",
                    HubContentVersion = "string",
                    RecipeName = "string",
                },
                ContainerHostname = "string",
                Environment =
                {
                    { "string", "string" },
                },
                Framework = "string",
                FrameworkVersion = "string",
                Image = "string",
                ImageDigest = "string",
                IsCheckpoint = false,
                ModelDataEtag = "string",
                ModelDataSource = new Aws.Sagemaker.Inputs.AlgorithmInferenceSpecificationContainerModelDataSourceArgs
                {
                    S3DataSource = new Aws.Sagemaker.Inputs.AlgorithmInferenceSpecificationContainerModelDataSourceS3DataSourceArgs
                    {
                        CompressionType = "string",
                        S3DataType = "string",
                        S3Uri = "string",
                        Etag = "string",
                        HubAccessConfig = new Aws.Sagemaker.Inputs.AlgorithmInferenceSpecificationContainerModelDataSourceS3DataSourceHubAccessConfigArgs
                        {
                            HubContentArn = "string",
                        },
                        ManifestEtag = "string",
                        ManifestS3Uri = "string",
                        ModelAccessConfig = new Aws.Sagemaker.Inputs.AlgorithmInferenceSpecificationContainerModelDataSourceS3DataSourceModelAccessConfigArgs
                        {
                            AcceptEula = false,
                        },
                    },
                },
                ModelDataUrl = "string",
                ModelInput = new Aws.Sagemaker.Inputs.AlgorithmInferenceSpecificationContainerModelInputArgs
                {
                    DataInputConfig = "string",
                },
                NearestModelName = "string",
                ProductId = "string",
            },
        },
        SupportedContentTypes = new[]
        {
            "string",
        },
        SupportedRealtimeInferenceInstanceTypes = new[]
        {
            "string",
        },
        SupportedResponseMimeTypes = new[]
        {
            "string",
        },
        SupportedTransformInstanceTypes = new[]
        {
            "string",
        },
    },
    Region = "string",
    Tags =
    {
        { "string", "string" },
    },
    Timeouts = new Aws.Sagemaker.Inputs.AlgorithmTimeoutsArgs
    {
        Create = "string",
        Delete = "string",
    },
    ValidationSpecification = new Aws.Sagemaker.Inputs.AlgorithmValidationSpecificationArgs
    {
        ValidationProfiles = new Aws.Sagemaker.Inputs.AlgorithmValidationSpecificationValidationProfilesArgs
        {
            ProfileName = "string",
            TrainingJobDefinition = new Aws.Sagemaker.Inputs.AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionArgs
            {
                InputDataConfigs = new[]
                {
                    new Aws.Sagemaker.Inputs.AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionInputDataConfigArgs
                    {
                        ChannelName = "string",
                        DataSource = new Aws.Sagemaker.Inputs.AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionInputDataConfigDataSourceArgs
                        {
                            FileSystemDataSource = new Aws.Sagemaker.Inputs.AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionInputDataConfigDataSourceFileSystemDataSourceArgs
                            {
                                DirectoryPath = "string",
                                FileSystemAccessMode = "string",
                                FileSystemId = "string",
                                FileSystemType = "string",
                            },
                            S3DataSource = new Aws.Sagemaker.Inputs.AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionInputDataConfigDataSourceS3DataSourceArgs
                            {
                                S3DataType = "string",
                                S3Uri = "string",
                                AttributeNames = new[]
                                {
                                    "string",
                                },
                                HubAccessConfig = new Aws.Sagemaker.Inputs.AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionInputDataConfigDataSourceS3DataSourceHubAccessConfigArgs
                                {
                                    HubContentArn = "string",
                                },
                                InstanceGroupNames = new[]
                                {
                                    "string",
                                },
                                ModelAccessConfig = new Aws.Sagemaker.Inputs.AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionInputDataConfigDataSourceS3DataSourceModelAccessConfigArgs
                                {
                                    AcceptEula = false,
                                },
                                S3DataDistributionType = "string",
                            },
                        },
                        CompressionType = "string",
                        ContentType = "string",
                        InputMode = "string",
                        RecordWrapperType = "string",
                        ShuffleConfig = new Aws.Sagemaker.Inputs.AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionInputDataConfigShuffleConfigArgs
                        {
                            Seed = 0,
                        },
                    },
                },
                OutputDataConfig = new Aws.Sagemaker.Inputs.AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionOutputDataConfigArgs
                {
                    S3OutputPath = "string",
                    CompressionType = "string",
                    KmsKeyId = "string",
                },
                ResourceConfig = new Aws.Sagemaker.Inputs.AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionResourceConfigArgs
                {
                    InstanceCount = 0,
                    InstanceGroups = new[]
                    {
                        new Aws.Sagemaker.Inputs.AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionResourceConfigInstanceGroupArgs
                        {
                            InstanceCount = 0,
                            InstanceGroupName = "string",
                            InstanceType = "string",
                        },
                    },
                    InstancePlacementConfig = new Aws.Sagemaker.Inputs.AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionResourceConfigInstancePlacementConfigArgs
                    {
                        EnableMultipleJobs = false,
                        PlacementSpecifications = new[]
                        {
                            new Aws.Sagemaker.Inputs.AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionResourceConfigInstancePlacementConfigPlacementSpecificationArgs
                            {
                                InstanceCount = 0,
                                UltraServerId = "string",
                            },
                        },
                    },
                    InstanceType = "string",
                    KeepAlivePeriodInSeconds = 0,
                    TrainingPlanArn = "string",
                    VolumeKmsKeyId = "string",
                    VolumeSizeInGb = 0,
                },
                StoppingCondition = new Aws.Sagemaker.Inputs.AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionStoppingConditionArgs
                {
                    MaxPendingTimeInSeconds = 0,
                    MaxRuntimeInSeconds = 0,
                    MaxWaitTimeInSeconds = 0,
                },
                TrainingInputMode = "string",
                HyperParameters =
                {
                    { "string", "string" },
                },
            },
            TransformJobDefinition = new Aws.Sagemaker.Inputs.AlgorithmValidationSpecificationValidationProfilesTransformJobDefinitionArgs
            {
                TransformInput = new Aws.Sagemaker.Inputs.AlgorithmValidationSpecificationValidationProfilesTransformJobDefinitionTransformInputArgs
                {
                    DataSource = new Aws.Sagemaker.Inputs.AlgorithmValidationSpecificationValidationProfilesTransformJobDefinitionTransformInputDataSourceArgs
                    {
                        S3DataSource = new Aws.Sagemaker.Inputs.AlgorithmValidationSpecificationValidationProfilesTransformJobDefinitionTransformInputDataSourceS3DataSourceArgs
                        {
                            S3DataType = "string",
                            S3Uri = "string",
                        },
                    },
                    CompressionType = "string",
                    ContentType = "string",
                    SplitType = "string",
                },
                TransformOutput = new Aws.Sagemaker.Inputs.AlgorithmValidationSpecificationValidationProfilesTransformJobDefinitionTransformOutputArgs
                {
                    S3OutputPath = "string",
                    Accept = "string",
                    AssembleWith = "string",
                    KmsKeyId = "string",
                },
                TransformResources = new Aws.Sagemaker.Inputs.AlgorithmValidationSpecificationValidationProfilesTransformJobDefinitionTransformResourcesArgs
                {
                    InstanceCount = 0,
                    InstanceType = "string",
                    TransformAmiVersion = "string",
                    VolumeKmsKeyId = "string",
                },
                BatchStrategy = "string",
                Environment =
                {
                    { "string", "string" },
                },
                MaxConcurrentTransforms = 0,
                MaxPayloadInMb = 0,
            },
        },
        ValidationRole = "string",
    },
});
// Reference example: placeholder values for every Algorithm input property.
example, err := sagemaker.NewAlgorithm(ctx, "algorithmResource", &sagemaker.AlgorithmArgs{
	AlgorithmName: pulumi.String("string"),
	TrainingSpecification: &sagemaker.AlgorithmTrainingSpecificationArgs{
		SupportedTrainingInstanceTypes: pulumi.StringArray{
			pulumi.String("string"),
		},
		TrainingChannels: sagemaker.AlgorithmTrainingSpecificationTrainingChannelArray{
			&sagemaker.AlgorithmTrainingSpecificationTrainingChannelArgs{
				Name: pulumi.String("string"),
				SupportedContentTypes: pulumi.StringArray{
					pulumi.String("string"),
				},
				SupportedInputModes: pulumi.StringArray{
					pulumi.String("string"),
				},
				Description: pulumi.String("string"),
				IsRequired:  pulumi.Bool(false),
				SupportedCompressionTypes: pulumi.StringArray{
					pulumi.String("string"),
				},
			},
		},
		TrainingImage: pulumi.String("string"),
		AdditionalS3DataSource: &sagemaker.AlgorithmTrainingSpecificationAdditionalS3DataSourceArgs{
			S3DataType:      pulumi.String("string"),
			S3Uri:           pulumi.String("string"),
			CompressionType: pulumi.String("string"),
			Etag:            pulumi.String("string"),
		},
		MetricDefinitions: sagemaker.AlgorithmTrainingSpecificationMetricDefinitionArray{
			&sagemaker.AlgorithmTrainingSpecificationMetricDefinitionArgs{
				Name:  pulumi.String("string"),
				Regex: pulumi.String("string"),
			},
		},
		SupportedHyperParameters: sagemaker.AlgorithmTrainingSpecificationSupportedHyperParameterArray{
			&sagemaker.AlgorithmTrainingSpecificationSupportedHyperParameterArgs{
				Name:         pulumi.String("string"),
				Type:         pulumi.String("string"),
				DefaultValue: pulumi.String("string"),
				Description:  pulumi.String("string"),
				IsRequired:   pulumi.Bool(false),
				IsTunable:    pulumi.Bool(false),
				Range: &sagemaker.AlgorithmTrainingSpecificationSupportedHyperParameterRangeArgs{
					CategoricalParameterRangeSpecification: &sagemaker.AlgorithmTrainingSpecificationSupportedHyperParameterRangeCategoricalParameterRangeSpecificationArgs{
						Values: pulumi.StringArray{
							pulumi.String("string"),
						},
					},
					ContinuousParameterRangeSpecification: &sagemaker.AlgorithmTrainingSpecificationSupportedHyperParameterRangeContinuousParameterRangeSpecificationArgs{
						MaxValue: pulumi.String("string"),
						MinValue: pulumi.String("string"),
					},
					IntegerParameterRangeSpecification: &sagemaker.AlgorithmTrainingSpecificationSupportedHyperParameterRangeIntegerParameterRangeSpecificationArgs{
						MaxValue: pulumi.String("string"),
						MinValue: pulumi.String("string"),
					},
				},
			},
		},
		SupportedTuningJobObjectiveMetrics: sagemaker.AlgorithmTrainingSpecificationSupportedTuningJobObjectiveMetricArray{
			&sagemaker.AlgorithmTrainingSpecificationSupportedTuningJobObjectiveMetricArgs{
				MetricName: pulumi.String("string"),
				Type:       pulumi.String("string"),
			},
		},
		SupportsDistributedTraining: pulumi.Bool(false),
		TrainingImageDigest:         pulumi.String("string"),
	},
	AlgorithmDescription:  pulumi.String("string"),
	CertifyForMarketplace: pulumi.Bool(false),
	InferenceSpecification: &sagemaker.AlgorithmInferenceSpecificationArgs{
		Containers: sagemaker.AlgorithmInferenceSpecificationContainerArray{
			&sagemaker.AlgorithmInferenceSpecificationContainerArgs{
				AdditionalS3DataSource: &sagemaker.AlgorithmInferenceSpecificationContainerAdditionalS3DataSourceArgs{
					S3DataType:      pulumi.String("string"),
					S3Uri:           pulumi.String("string"),
					CompressionType: pulumi.String("string"),
					Etag:            pulumi.String("string"),
				},
				BaseModel: &sagemaker.AlgorithmInferenceSpecificationContainerBaseModelArgs{
					HubContentName:    pulumi.String("string"),
					HubContentVersion: pulumi.String("string"),
					RecipeName:        pulumi.String("string"),
				},
				ContainerHostname: pulumi.String("string"),
				Environment: pulumi.StringMap{
					"string": pulumi.String("string"),
				},
				Framework:        pulumi.String("string"),
				FrameworkVersion: pulumi.String("string"),
				Image:            pulumi.String("string"),
				ImageDigest:      pulumi.String("string"),
				IsCheckpoint:     pulumi.Bool(false),
				ModelDataEtag:    pulumi.String("string"),
				ModelDataSource: &sagemaker.AlgorithmInferenceSpecificationContainerModelDataSourceArgs{
					S3DataSource: &sagemaker.AlgorithmInferenceSpecificationContainerModelDataSourceS3DataSourceArgs{
						CompressionType: pulumi.String("string"),
						S3DataType:      pulumi.String("string"),
						S3Uri:           pulumi.String("string"),
						Etag:            pulumi.String("string"),
						HubAccessConfig: &sagemaker.AlgorithmInferenceSpecificationContainerModelDataSourceS3DataSourceHubAccessConfigArgs{
							HubContentArn: pulumi.String("string"),
						},
						ManifestEtag:  pulumi.String("string"),
						ManifestS3Uri: pulumi.String("string"),
						ModelAccessConfig: &sagemaker.AlgorithmInferenceSpecificationContainerModelDataSourceS3DataSourceModelAccessConfigArgs{
							AcceptEula: pulumi.Bool(false),
						},
					},
				},
				ModelDataUrl: pulumi.String("string"),
				ModelInput: &sagemaker.AlgorithmInferenceSpecificationContainerModelInputArgs{
					DataInputConfig: pulumi.String("string"),
				},
				NearestModelName: pulumi.String("string"),
				ProductId:        pulumi.String("string"),
			},
		},
		SupportedContentTypes: pulumi.StringArray{
			pulumi.String("string"),
		},
		SupportedRealtimeInferenceInstanceTypes: pulumi.StringArray{
			pulumi.String("string"),
		},
		SupportedResponseMimeTypes: pulumi.StringArray{
			pulumi.String("string"),
		},
		SupportedTransformInstanceTypes: pulumi.StringArray{
			pulumi.String("string"),
		},
	},
	Region: pulumi.String("string"),
	Tags: pulumi.StringMap{
		"string": pulumi.String("string"),
	},
	Timeouts: &sagemaker.AlgorithmTimeoutsArgs{
		Create: pulumi.String("string"),
		Delete: pulumi.String("string"),
	},
	ValidationSpecification: &sagemaker.AlgorithmValidationSpecificationArgs{
		ValidationProfiles: &sagemaker.AlgorithmValidationSpecificationValidationProfilesArgs{
			ProfileName: pulumi.String("string"),
			TrainingJobDefinition: &sagemaker.AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionArgs{
				InputDataConfigs: sagemaker.AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionInputDataConfigArray{
					&sagemaker.AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionInputDataConfigArgs{
						ChannelName: pulumi.String("string"),
						DataSource: &sagemaker.AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionInputDataConfigDataSourceArgs{
							FileSystemDataSource: &sagemaker.AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionInputDataConfigDataSourceFileSystemDataSourceArgs{
								DirectoryPath:        pulumi.String("string"),
								FileSystemAccessMode: pulumi.String("string"),
								FileSystemId:         pulumi.String("string"),
								FileSystemType:       pulumi.String("string"),
							},
							S3DataSource: &sagemaker.AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionInputDataConfigDataSourceS3DataSourceArgs{
								S3DataType: pulumi.String("string"),
								S3Uri:      pulumi.String("string"),
								AttributeNames: pulumi.StringArray{
									pulumi.String("string"),
								},
								HubAccessConfig: &sagemaker.AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionInputDataConfigDataSourceS3DataSourceHubAccessConfigArgs{
									HubContentArn: pulumi.String("string"),
								},
								InstanceGroupNames: pulumi.StringArray{
									pulumi.String("string"),
								},
								ModelAccessConfig: &sagemaker.AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionInputDataConfigDataSourceS3DataSourceModelAccessConfigArgs{
									AcceptEula: pulumi.Bool(false),
								},
								S3DataDistributionType: pulumi.String("string"),
							},
						},
						CompressionType:   pulumi.String("string"),
						ContentType:       pulumi.String("string"),
						InputMode:         pulumi.String("string"),
						RecordWrapperType: pulumi.String("string"),
						ShuffleConfig: &sagemaker.AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionInputDataConfigShuffleConfigArgs{
							Seed: pulumi.Int(0),
						},
					},
				},
				OutputDataConfig: &sagemaker.AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionOutputDataConfigArgs{
					S3OutputPath:    pulumi.String("string"),
					CompressionType: pulumi.String("string"),
					KmsKeyId:        pulumi.String("string"),
				},
				ResourceConfig: &sagemaker.AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionResourceConfigArgs{
					InstanceCount: pulumi.Int(0),
					InstanceGroups: sagemaker.AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionResourceConfigInstanceGroupArray{
						&sagemaker.AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionResourceConfigInstanceGroupArgs{
							InstanceCount:     pulumi.Int(0),
							InstanceGroupName: pulumi.String("string"),
							InstanceType:      pulumi.String("string"),
						},
					},
					InstancePlacementConfig: &sagemaker.AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionResourceConfigInstancePlacementConfigArgs{
						EnableMultipleJobs: pulumi.Bool(false),
						PlacementSpecifications: sagemaker.AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionResourceConfigInstancePlacementConfigPlacementSpecificationArray{
							&sagemaker.AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionResourceConfigInstancePlacementConfigPlacementSpecificationArgs{
								InstanceCount: pulumi.Int(0),
								UltraServerId: pulumi.String("string"),
							},
						},
					},
					InstanceType:             pulumi.String("string"),
					KeepAlivePeriodInSeconds: pulumi.Int(0),
					TrainingPlanArn:          pulumi.String("string"),
					VolumeKmsKeyId:           pulumi.String("string"),
					VolumeSizeInGb:           pulumi.Int(0),
				},
				StoppingCondition: &sagemaker.AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionStoppingConditionArgs{
					MaxPendingTimeInSeconds: pulumi.Int(0),
					MaxRuntimeInSeconds:     pulumi.Int(0),
					MaxWaitTimeInSeconds:    pulumi.Int(0),
				},
				TrainingInputMode: pulumi.String("string"),
				HyperParameters: pulumi.StringMap{
					"string": pulumi.String("string"),
				},
			},
			TransformJobDefinition: &sagemaker.AlgorithmValidationSpecificationValidationProfilesTransformJobDefinitionArgs{
				TransformInput: &sagemaker.AlgorithmValidationSpecificationValidationProfilesTransformJobDefinitionTransformInputArgs{
					DataSource: &sagemaker.AlgorithmValidationSpecificationValidationProfilesTransformJobDefinitionTransformInputDataSourceArgs{
						S3DataSource: &sagemaker.AlgorithmValidationSpecificationValidationProfilesTransformJobDefinitionTransformInputDataSourceS3DataSourceArgs{
							S3DataType: pulumi.String("string"),
							S3Uri:      pulumi.String("string"),
						},
					},
					CompressionType: pulumi.String("string"),
					ContentType:     pulumi.String("string"),
					SplitType:       pulumi.String("string"),
				},
				// FIX: take the address of the Args struct — this optional nested field
				// is a PtrInput, implemented by *Args (matches TransformInput and
				// TransformResources above/below; the value literal does not compile).
				TransformOutput: &sagemaker.AlgorithmValidationSpecificationValidationProfilesTransformJobDefinitionTransformOutputArgs{
					S3OutputPath: pulumi.String("string"),
					Accept:       pulumi.String("string"),
					AssembleWith: pulumi.String("string"),
					KmsKeyId:     pulumi.String("string"),
				},
				TransformResources: &sagemaker.AlgorithmValidationSpecificationValidationProfilesTransformJobDefinitionTransformResourcesArgs{
					InstanceCount:       pulumi.Int(0),
					InstanceType:        pulumi.String("string"),
					TransformAmiVersion: pulumi.String("string"),
					VolumeKmsKeyId:      pulumi.String("string"),
				},
				BatchStrategy: pulumi.String("string"),
				Environment: pulumi.StringMap{
					"string": pulumi.String("string"),
				},
				MaxConcurrentTransforms: pulumi.Int(0),
				MaxPayloadInMb:          pulumi.Int(0),
			},
		},
		ValidationRole: pulumi.String("string"),
	},
})
var algorithmResource = new Algorithm("algorithmResource", AlgorithmArgs.builder()
.algorithmName("string")
.trainingSpecification(AlgorithmTrainingSpecificationArgs.builder()
.supportedTrainingInstanceTypes("string")
.trainingChannels(AlgorithmTrainingSpecificationTrainingChannelArgs.builder()
.name("string")
.supportedContentTypes("string")
.supportedInputModes("string")
.description("string")
.isRequired(false)
.supportedCompressionTypes("string")
.build())
.trainingImage("string")
.additionalS3DataSource(AlgorithmTrainingSpecificationAdditionalS3DataSourceArgs.builder()
.s3DataType("string")
.s3Uri("string")
.compressionType("string")
.etag("string")
.build())
.metricDefinitions(AlgorithmTrainingSpecificationMetricDefinitionArgs.builder()
.name("string")
.regex("string")
.build())
.supportedHyperParameters(AlgorithmTrainingSpecificationSupportedHyperParameterArgs.builder()
.name("string")
.type("string")
.defaultValue("string")
.description("string")
.isRequired(false)
.isTunable(false)
.range(AlgorithmTrainingSpecificationSupportedHyperParameterRangeArgs.builder()
.categoricalParameterRangeSpecification(AlgorithmTrainingSpecificationSupportedHyperParameterRangeCategoricalParameterRangeSpecificationArgs.builder()
.values("string")
.build())
.continuousParameterRangeSpecification(AlgorithmTrainingSpecificationSupportedHyperParameterRangeContinuousParameterRangeSpecificationArgs.builder()
.maxValue("string")
.minValue("string")
.build())
.integerParameterRangeSpecification(AlgorithmTrainingSpecificationSupportedHyperParameterRangeIntegerParameterRangeSpecificationArgs.builder()
.maxValue("string")
.minValue("string")
.build())
.build())
.build())
.supportedTuningJobObjectiveMetrics(AlgorithmTrainingSpecificationSupportedTuningJobObjectiveMetricArgs.builder()
.metricName("string")
.type("string")
.build())
.supportsDistributedTraining(false)
.trainingImageDigest("string")
.build())
.algorithmDescription("string")
.certifyForMarketplace(false)
.inferenceSpecification(AlgorithmInferenceSpecificationArgs.builder()
.containers(AlgorithmInferenceSpecificationContainerArgs.builder()
.additionalS3DataSource(AlgorithmInferenceSpecificationContainerAdditionalS3DataSourceArgs.builder()
.s3DataType("string")
.s3Uri("string")
.compressionType("string")
.etag("string")
.build())
.baseModel(AlgorithmInferenceSpecificationContainerBaseModelArgs.builder()
.hubContentName("string")
.hubContentVersion("string")
.recipeName("string")
.build())
.containerHostname("string")
.environment(Map.of("string", "string"))
.framework("string")
.frameworkVersion("string")
.image("string")
.imageDigest("string")
.isCheckpoint(false)
.modelDataEtag("string")
.modelDataSource(AlgorithmInferenceSpecificationContainerModelDataSourceArgs.builder()
.s3DataSource(AlgorithmInferenceSpecificationContainerModelDataSourceS3DataSourceArgs.builder()
.compressionType("string")
.s3DataType("string")
.s3Uri("string")
.etag("string")
.hubAccessConfig(AlgorithmInferenceSpecificationContainerModelDataSourceS3DataSourceHubAccessConfigArgs.builder()
.hubContentArn("string")
.build())
.manifestEtag("string")
.manifestS3Uri("string")
.modelAccessConfig(AlgorithmInferenceSpecificationContainerModelDataSourceS3DataSourceModelAccessConfigArgs.builder()
.acceptEula(false)
.build())
.build())
.build())
.modelDataUrl("string")
.modelInput(AlgorithmInferenceSpecificationContainerModelInputArgs.builder()
.dataInputConfig("string")
.build())
.nearestModelName("string")
.productId("string")
.build())
.supportedContentTypes("string")
.supportedRealtimeInferenceInstanceTypes("string")
.supportedResponseMimeTypes("string")
.supportedTransformInstanceTypes("string")
.build())
.region("string")
.tags(Map.of("string", "string"))
.timeouts(AlgorithmTimeoutsArgs.builder()
.create("string")
.delete("string")
.build())
.validationSpecification(AlgorithmValidationSpecificationArgs.builder()
.validationProfiles(AlgorithmValidationSpecificationValidationProfilesArgs.builder()
.profileName("string")
.trainingJobDefinition(AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionArgs.builder()
.inputDataConfigs(AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionInputDataConfigArgs.builder()
.channelName("string")
.dataSource(AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionInputDataConfigDataSourceArgs.builder()
.fileSystemDataSource(AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionInputDataConfigDataSourceFileSystemDataSourceArgs.builder()
.directoryPath("string")
.fileSystemAccessMode("string")
.fileSystemId("string")
.fileSystemType("string")
.build())
.s3DataSource(AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionInputDataConfigDataSourceS3DataSourceArgs.builder()
.s3DataType("string")
.s3Uri("string")
.attributeNames("string")
.hubAccessConfig(AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionInputDataConfigDataSourceS3DataSourceHubAccessConfigArgs.builder()
.hubContentArn("string")
.build())
.instanceGroupNames("string")
.modelAccessConfig(AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionInputDataConfigDataSourceS3DataSourceModelAccessConfigArgs.builder()
.acceptEula(false)
.build())
.s3DataDistributionType("string")
.build())
.build())
.compressionType("string")
.contentType("string")
.inputMode("string")
.recordWrapperType("string")
.shuffleConfig(AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionInputDataConfigShuffleConfigArgs.builder()
.seed(0)
.build())
.build())
.outputDataConfig(AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionOutputDataConfigArgs.builder()
.s3OutputPath("string")
.compressionType("string")
.kmsKeyId("string")
.build())
.resourceConfig(AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionResourceConfigArgs.builder()
.instanceCount(0)
.instanceGroups(AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionResourceConfigInstanceGroupArgs.builder()
.instanceCount(0)
.instanceGroupName("string")
.instanceType("string")
.build())
.instancePlacementConfig(AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionResourceConfigInstancePlacementConfigArgs.builder()
.enableMultipleJobs(false)
.placementSpecifications(AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionResourceConfigInstancePlacementConfigPlacementSpecificationArgs.builder()
.instanceCount(0)
.ultraServerId("string")
.build())
.build())
.instanceType("string")
.keepAlivePeriodInSeconds(0)
.trainingPlanArn("string")
.volumeKmsKeyId("string")
.volumeSizeInGb(0)
.build())
.stoppingCondition(AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionStoppingConditionArgs.builder()
.maxPendingTimeInSeconds(0)
.maxRuntimeInSeconds(0)
.maxWaitTimeInSeconds(0)
.build())
.trainingInputMode("string")
.hyperParameters(Map.of("string", "string"))
.build())
.transformJobDefinition(AlgorithmValidationSpecificationValidationProfilesTransformJobDefinitionArgs.builder()
.transformInput(AlgorithmValidationSpecificationValidationProfilesTransformJobDefinitionTransformInputArgs.builder()
.dataSource(AlgorithmValidationSpecificationValidationProfilesTransformJobDefinitionTransformInputDataSourceArgs.builder()
.s3DataSource(AlgorithmValidationSpecificationValidationProfilesTransformJobDefinitionTransformInputDataSourceS3DataSourceArgs.builder()
.s3DataType("string")
.s3Uri("string")
.build())
.build())
.compressionType("string")
.contentType("string")
.splitType("string")
.build())
.transformOutput(AlgorithmValidationSpecificationValidationProfilesTransformJobDefinitionTransformOutputArgs.builder()
.s3OutputPath("string")
.accept("string")
.assembleWith("string")
.kmsKeyId("string")
.build())
.transformResources(AlgorithmValidationSpecificationValidationProfilesTransformJobDefinitionTransformResourcesArgs.builder()
.instanceCount(0)
.instanceType("string")
.transformAmiVersion("string")
.volumeKmsKeyId("string")
.build())
.batchStrategy("string")
.environment(Map.of("string", "string"))
.maxConcurrentTransforms(0)
.maxPayloadInMb(0)
.build())
.build())
.validationRole("string")
.build())
.build());
algorithm_resource = aws.sagemaker.Algorithm("algorithmResource",
algorithm_name="string",
training_specification={
"supported_training_instance_types": ["string"],
"training_channels": [{
"name": "string",
"supported_content_types": ["string"],
"supported_input_modes": ["string"],
"description": "string",
"is_required": False,
"supported_compression_types": ["string"],
}],
"training_image": "string",
"additional_s3_data_source": {
"s3_data_type": "string",
"s3_uri": "string",
"compression_type": "string",
"etag": "string",
},
"metric_definitions": [{
"name": "string",
"regex": "string",
}],
"supported_hyper_parameters": [{
"name": "string",
"type": "string",
"default_value": "string",
"description": "string",
"is_required": False,
"is_tunable": False,
"range": {
"categorical_parameter_range_specification": {
"values": ["string"],
},
"continuous_parameter_range_specification": {
"max_value": "string",
"min_value": "string",
},
"integer_parameter_range_specification": {
"max_value": "string",
"min_value": "string",
},
},
}],
"supported_tuning_job_objective_metrics": [{
"metric_name": "string",
"type": "string",
}],
"supports_distributed_training": False,
"training_image_digest": "string",
},
algorithm_description="string",
certify_for_marketplace=False,
inference_specification={
"containers": [{
"additional_s3_data_source": {
"s3_data_type": "string",
"s3_uri": "string",
"compression_type": "string",
"etag": "string",
},
"base_model": {
"hub_content_name": "string",
"hub_content_version": "string",
"recipe_name": "string",
},
"container_hostname": "string",
"environment": {
"string": "string",
},
"framework": "string",
"framework_version": "string",
"image": "string",
"image_digest": "string",
"is_checkpoint": False,
"model_data_etag": "string",
"model_data_source": {
"s3_data_source": {
"compression_type": "string",
"s3_data_type": "string",
"s3_uri": "string",
"etag": "string",
"hub_access_config": {
"hub_content_arn": "string",
},
"manifest_etag": "string",
"manifest_s3_uri": "string",
"model_access_config": {
"accept_eula": False,
},
},
},
"model_data_url": "string",
"model_input": {
"data_input_config": "string",
},
"nearest_model_name": "string",
"product_id": "string",
}],
"supported_content_types": ["string"],
"supported_realtime_inference_instance_types": ["string"],
"supported_response_mime_types": ["string"],
"supported_transform_instance_types": ["string"],
},
region="string",
tags={
"string": "string",
},
timeouts={
"create": "string",
"delete": "string",
},
validation_specification={
"validation_profiles": {
"profile_name": "string",
"training_job_definition": {
"input_data_configs": [{
"channel_name": "string",
"data_source": {
"file_system_data_source": {
"directory_path": "string",
"file_system_access_mode": "string",
"file_system_id": "string",
"file_system_type": "string",
},
"s3_data_source": {
"s3_data_type": "string",
"s3_uri": "string",
"attribute_names": ["string"],
"hub_access_config": {
"hub_content_arn": "string",
},
"instance_group_names": ["string"],
"model_access_config": {
"accept_eula": False,
},
"s3_data_distribution_type": "string",
},
},
"compression_type": "string",
"content_type": "string",
"input_mode": "string",
"record_wrapper_type": "string",
"shuffle_config": {
"seed": 0,
},
}],
"output_data_config": {
"s3_output_path": "string",
"compression_type": "string",
"kms_key_id": "string",
},
"resource_config": {
"instance_count": 0,
"instance_groups": [{
"instance_count": 0,
"instance_group_name": "string",
"instance_type": "string",
}],
"instance_placement_config": {
"enable_multiple_jobs": False,
"placement_specifications": [{
"instance_count": 0,
"ultra_server_id": "string",
}],
},
"instance_type": "string",
"keep_alive_period_in_seconds": 0,
"training_plan_arn": "string",
"volume_kms_key_id": "string",
"volume_size_in_gb": 0,
},
"stopping_condition": {
"max_pending_time_in_seconds": 0,
"max_runtime_in_seconds": 0,
"max_wait_time_in_seconds": 0,
},
"training_input_mode": "string",
"hyper_parameters": {
"string": "string",
},
},
"transform_job_definition": {
"transform_input": {
"data_source": {
"s3_data_source": {
"s3_data_type": "string",
"s3_uri": "string",
},
},
"compression_type": "string",
"content_type": "string",
"split_type": "string",
},
"transform_output": {
"s3_output_path": "string",
"accept": "string",
"assemble_with": "string",
"kms_key_id": "string",
},
"transform_resources": {
"instance_count": 0,
"instance_type": "string",
"transform_ami_version": "string",
"volume_kms_key_id": "string",
},
"batch_strategy": "string",
"environment": {
"string": "string",
},
"max_concurrent_transforms": 0,
"max_payload_in_mb": 0,
},
},
"validation_role": "string",
})
const algorithmResource = new aws.sagemaker.Algorithm("algorithmResource", {
algorithmName: "string",
trainingSpecification: {
supportedTrainingInstanceTypes: ["string"],
trainingChannels: [{
name: "string",
supportedContentTypes: ["string"],
supportedInputModes: ["string"],
description: "string",
isRequired: false,
supportedCompressionTypes: ["string"],
}],
trainingImage: "string",
additionalS3DataSource: {
s3DataType: "string",
s3Uri: "string",
compressionType: "string",
etag: "string",
},
metricDefinitions: [{
name: "string",
regex: "string",
}],
supportedHyperParameters: [{
name: "string",
type: "string",
defaultValue: "string",
description: "string",
isRequired: false,
isTunable: false,
range: {
categoricalParameterRangeSpecification: {
values: ["string"],
},
continuousParameterRangeSpecification: {
maxValue: "string",
minValue: "string",
},
integerParameterRangeSpecification: {
maxValue: "string",
minValue: "string",
},
},
}],
supportedTuningJobObjectiveMetrics: [{
metricName: "string",
type: "string",
}],
supportsDistributedTraining: false,
trainingImageDigest: "string",
},
algorithmDescription: "string",
certifyForMarketplace: false,
inferenceSpecification: {
containers: [{
additionalS3DataSource: {
s3DataType: "string",
s3Uri: "string",
compressionType: "string",
etag: "string",
},
baseModel: {
hubContentName: "string",
hubContentVersion: "string",
recipeName: "string",
},
containerHostname: "string",
environment: {
string: "string",
},
framework: "string",
frameworkVersion: "string",
image: "string",
imageDigest: "string",
isCheckpoint: false,
modelDataEtag: "string",
modelDataSource: {
s3DataSource: {
compressionType: "string",
s3DataType: "string",
s3Uri: "string",
etag: "string",
hubAccessConfig: {
hubContentArn: "string",
},
manifestEtag: "string",
manifestS3Uri: "string",
modelAccessConfig: {
acceptEula: false,
},
},
},
modelDataUrl: "string",
modelInput: {
dataInputConfig: "string",
},
nearestModelName: "string",
productId: "string",
}],
supportedContentTypes: ["string"],
supportedRealtimeInferenceInstanceTypes: ["string"],
supportedResponseMimeTypes: ["string"],
supportedTransformInstanceTypes: ["string"],
},
region: "string",
tags: {
string: "string",
},
timeouts: {
create: "string",
"delete": "string",
},
validationSpecification: {
validationProfiles: {
profileName: "string",
trainingJobDefinition: {
inputDataConfigs: [{
channelName: "string",
dataSource: {
fileSystemDataSource: {
directoryPath: "string",
fileSystemAccessMode: "string",
fileSystemId: "string",
fileSystemType: "string",
},
s3DataSource: {
s3DataType: "string",
s3Uri: "string",
attributeNames: ["string"],
hubAccessConfig: {
hubContentArn: "string",
},
instanceGroupNames: ["string"],
modelAccessConfig: {
acceptEula: false,
},
s3DataDistributionType: "string",
},
},
compressionType: "string",
contentType: "string",
inputMode: "string",
recordWrapperType: "string",
shuffleConfig: {
seed: 0,
},
}],
outputDataConfig: {
s3OutputPath: "string",
compressionType: "string",
kmsKeyId: "string",
},
resourceConfig: {
instanceCount: 0,
instanceGroups: [{
instanceCount: 0,
instanceGroupName: "string",
instanceType: "string",
}],
instancePlacementConfig: {
enableMultipleJobs: false,
placementSpecifications: [{
instanceCount: 0,
ultraServerId: "string",
}],
},
instanceType: "string",
keepAlivePeriodInSeconds: 0,
trainingPlanArn: "string",
volumeKmsKeyId: "string",
volumeSizeInGb: 0,
},
stoppingCondition: {
maxPendingTimeInSeconds: 0,
maxRuntimeInSeconds: 0,
maxWaitTimeInSeconds: 0,
},
trainingInputMode: "string",
hyperParameters: {
string: "string",
},
},
transformJobDefinition: {
transformInput: {
dataSource: {
s3DataSource: {
s3DataType: "string",
s3Uri: "string",
},
},
compressionType: "string",
contentType: "string",
splitType: "string",
},
transformOutput: {
s3OutputPath: "string",
accept: "string",
assembleWith: "string",
kmsKeyId: "string",
},
transformResources: {
instanceCount: 0,
instanceType: "string",
transformAmiVersion: "string",
volumeKmsKeyId: "string",
},
batchStrategy: "string",
environment: {
string: "string",
},
maxConcurrentTransforms: 0,
maxPayloadInMb: 0,
},
},
validationRole: "string",
},
});
type: aws:sagemaker:Algorithm
properties:
algorithmDescription: string
algorithmName: string
certifyForMarketplace: false
inferenceSpecification:
containers:
- additionalS3DataSource:
compressionType: string
etag: string
s3DataType: string
s3Uri: string
baseModel:
hubContentName: string
hubContentVersion: string
recipeName: string
containerHostname: string
environment:
string: string
framework: string
frameworkVersion: string
image: string
imageDigest: string
isCheckpoint: false
modelDataEtag: string
modelDataSource:
s3DataSource:
compressionType: string
etag: string
hubAccessConfig:
hubContentArn: string
manifestEtag: string
manifestS3Uri: string
modelAccessConfig:
acceptEula: false
s3DataType: string
s3Uri: string
modelDataUrl: string
modelInput:
dataInputConfig: string
nearestModelName: string
productId: string
supportedContentTypes:
- string
supportedRealtimeInferenceInstanceTypes:
- string
supportedResponseMimeTypes:
- string
supportedTransformInstanceTypes:
- string
region: string
tags:
string: string
timeouts:
create: string
delete: string
trainingSpecification:
additionalS3DataSource:
compressionType: string
etag: string
s3DataType: string
s3Uri: string
metricDefinitions:
- name: string
regex: string
supportedHyperParameters:
- defaultValue: string
description: string
isRequired: false
isTunable: false
name: string
range:
categoricalParameterRangeSpecification:
values:
- string
continuousParameterRangeSpecification:
maxValue: string
minValue: string
integerParameterRangeSpecification:
maxValue: string
minValue: string
type: string
supportedTrainingInstanceTypes:
- string
supportedTuningJobObjectiveMetrics:
- metricName: string
type: string
supportsDistributedTraining: false
trainingChannels:
- description: string
isRequired: false
name: string
supportedCompressionTypes:
- string
supportedContentTypes:
- string
supportedInputModes:
- string
trainingImage: string
trainingImageDigest: string
validationSpecification:
validationProfiles:
profileName: string
trainingJobDefinition:
hyperParameters:
string: string
inputDataConfigs:
- channelName: string
compressionType: string
contentType: string
dataSource:
fileSystemDataSource:
directoryPath: string
fileSystemAccessMode: string
fileSystemId: string
fileSystemType: string
s3DataSource:
attributeNames:
- string
hubAccessConfig:
hubContentArn: string
instanceGroupNames:
- string
modelAccessConfig:
acceptEula: false
s3DataDistributionType: string
s3DataType: string
s3Uri: string
inputMode: string
recordWrapperType: string
shuffleConfig:
seed: 0
outputDataConfig:
compressionType: string
kmsKeyId: string
s3OutputPath: string
resourceConfig:
instanceCount: 0
instanceGroups:
- instanceCount: 0
instanceGroupName: string
instanceType: string
instancePlacementConfig:
enableMultipleJobs: false
placementSpecifications:
- instanceCount: 0
ultraServerId: string
instanceType: string
keepAlivePeriodInSeconds: 0
trainingPlanArn: string
volumeKmsKeyId: string
volumeSizeInGb: 0
stoppingCondition:
maxPendingTimeInSeconds: 0
maxRuntimeInSeconds: 0
maxWaitTimeInSeconds: 0
trainingInputMode: string
transformJobDefinition:
batchStrategy: string
environment:
string: string
maxConcurrentTransforms: 0
maxPayloadInMb: 0
transformInput:
compressionType: string
contentType: string
dataSource:
s3DataSource:
s3DataType: string
s3Uri: string
splitType: string
transformOutput:
accept: string
assembleWith: string
kmsKeyId: string
s3OutputPath: string
transformResources:
instanceCount: 0
instanceType: string
transformAmiVersion: string
volumeKmsKeyId: string
validationRole: string
Algorithm Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
The Algorithm resource accepts the following input properties:
- Algorithm
Name string - Name of the algorithm.
- Training
Specification AlgorithmTraining Specification - Configuration for training jobs that use this algorithm. See Training Specification.
- Algorithm
Description string - Description of the algorithm.
- Certify
For Marketplace bool - Whether to certify the algorithm for AWS Marketplace.
- Inference
Specification AlgorithmInference Specification - Configuration for inference jobs that use this algorithm. See Inference Specification.
- Region string
- Region where this resource is managed. Defaults to the Region set in the provider configuration.
- Tags Dictionary<string, string>
- Map of tags to assign to the resource.
- Timeouts
Algorithm
Timeouts - Validation
Specification AlgorithmValidation Specification - Configuration used to validate the algorithm. See Validation Specification.
- Algorithm
Name string - Name of the algorithm.
- Training
Specification AlgorithmTraining Specification Args - Configuration for training jobs that use this algorithm. See Training Specification.
- Algorithm
Description string - Description of the algorithm.
- Certify
For Marketplace bool - Whether to certify the algorithm for AWS Marketplace.
- Inference
Specification AlgorithmInference Specification Args - Configuration for inference jobs that use this algorithm. See Inference Specification.
- Region string
- Region where this resource is managed. Defaults to the Region set in the provider configuration.
- Tags map[string]string
- Map of tags to assign to the resource.
- Timeouts
Algorithm
Timeouts Args - Validation
Specification AlgorithmValidation Specification Args - Configuration used to validate the algorithm. See Validation Specification.
- algorithm
Name String - Name of the algorithm.
- training
Specification AlgorithmTraining Specification - Configuration for training jobs that use this algorithm. See Training Specification.
- algorithm
Description String - Description of the algorithm.
- certify
For Marketplace Boolean - Whether to certify the algorithm for AWS Marketplace.
- inference
Specification AlgorithmInference Specification - Configuration for inference jobs that use this algorithm. See Inference Specification.
- region String
- Region where this resource is managed. Defaults to the Region set in the provider configuration.
- tags Map<String,String>
- Map of tags to assign to the resource.
- timeouts
Algorithm
Timeouts - validation
Specification AlgorithmValidation Specification - Configuration used to validate the algorithm. See Validation Specification.
- algorithm
Name string - Name of the algorithm.
- training
Specification AlgorithmTraining Specification - Configuration for training jobs that use this algorithm. See Training Specification.
- algorithm
Description string - Description of the algorithm.
- certify
For Marketplace boolean - Whether to certify the algorithm for AWS Marketplace.
- inference
Specification AlgorithmInference Specification - Configuration for inference jobs that use this algorithm. See Inference Specification.
- region string
- Region where this resource is managed. Defaults to the Region set in the provider configuration.
- tags {[key: string]: string}
- Map of tags to assign to the resource.
- timeouts
Algorithm
Timeouts - validation
Specification AlgorithmValidation Specification - Configuration used to validate the algorithm. See Validation Specification.
- algorithm_
name str - Name of the algorithm.
- training_
specification AlgorithmTraining Specification Args - Configuration for training jobs that use this algorithm. See Training Specification.
- algorithm_
description str - Description of the algorithm.
- certify_
for_ marketplace bool - Whether to certify the algorithm for AWS Marketplace.
- inference_
specification AlgorithmInference Specification Args - Configuration for inference jobs that use this algorithm. See Inference Specification.
- region str
- Region where this resource is managed. Defaults to the Region set in the provider configuration.
- tags Mapping[str, str]
- Map of tags to assign to the resource.
- timeouts
Algorithm
Timeouts Args - validation_
specification AlgorithmValidation Specification Args - Configuration used to validate the algorithm. See Validation Specification.
- algorithm
Name String - Name of the algorithm.
- training
Specification Property Map - Configuration for training jobs that use this algorithm. See Training Specification.
- algorithm
Description String - Description of the algorithm.
- certify
For Marketplace Boolean - Whether to certify the algorithm for AWS Marketplace.
- inference
Specification Property Map - Configuration for inference jobs that use this algorithm. See Inference Specification.
- region String
- Region where this resource is managed. Defaults to the Region set in the provider configuration.
- tags Map<String>
- Map of tags to assign to the resource.
- timeouts Property Map
- validation
Specification Property Map - Configuration used to validate the algorithm. See Validation Specification.
Outputs
All input properties are implicitly available as output properties. Additionally, the Algorithm resource produces the following output properties:
- Algorithm
Status string - Status of the algorithm.
- Arn string
- ARN of the algorithm.
- Creation
Time string - Time when the algorithm was created, in RFC3339 format.
- Id string
- The provider-assigned unique ID for this managed resource.
- Product
Id string - AWS Marketplace product ID associated with the algorithm.
- Dictionary<string, string>
- Map of tags assigned to the resource, including tags inherited from the provider
defaultTags configuration block.
- Algorithm
Status string - Status of the algorithm.
- Arn string
- ARN of the algorithm.
- Creation
Time string - Time when the algorithm was created, in RFC3339 format.
- Id string
- The provider-assigned unique ID for this managed resource.
- Product
Id string - AWS Marketplace product ID associated with the algorithm.
- map[string]string
- Map of tags assigned to the resource, including tags inherited from the provider
defaultTags configuration block.
- algorithm
Status String - Status of the algorithm.
- arn String
- ARN of the algorithm.
- creation
Time String - Time when the algorithm was created, in RFC3339 format.
- id String
- The provider-assigned unique ID for this managed resource.
- product
Id String - AWS Marketplace product ID associated with the algorithm.
- Map<String,String>
- Map of tags assigned to the resource, including tags inherited from the provider
defaultTags configuration block.
- algorithm
Status string - Status of the algorithm.
- arn string
- ARN of the algorithm.
- creation
Time string - Time when the algorithm was created, in RFC3339 format.
- id string
- The provider-assigned unique ID for this managed resource.
- product
Id string - AWS Marketplace product ID associated with the algorithm.
- {[key: string]: string}
- Map of tags assigned to the resource, including tags inherited from the provider
defaultTags configuration block.
- algorithm_
status str - Status of the algorithm.
- arn str
- ARN of the algorithm.
- creation_
time str - Time when the algorithm was created, in RFC3339 format.
- id str
- The provider-assigned unique ID for this managed resource.
- product_
id str - AWS Marketplace product ID associated with the algorithm.
- Mapping[str, str]
- Map of tags assigned to the resource, including tags inherited from the provider
defaultTags configuration block.
- algorithm
Status String - Status of the algorithm.
- arn String
- ARN of the algorithm.
- creation
Time String - Time when the algorithm was created, in RFC3339 format.
- id String
- The provider-assigned unique ID for this managed resource.
- product
Id String - AWS Marketplace product ID associated with the algorithm.
- Map<String>
- Map of tags assigned to the resource, including tags inherited from the provider
defaultTags configuration block.
Look up Existing Algorithm Resource
Get an existing Algorithm resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: AlgorithmState, opts?: CustomResourceOptions): Algorithm@staticmethod
def get(resource_name: str,
id: str,
opts: Optional[ResourceOptions] = None,
algorithm_description: Optional[str] = None,
algorithm_name: Optional[str] = None,
algorithm_status: Optional[str] = None,
arn: Optional[str] = None,
certify_for_marketplace: Optional[bool] = None,
creation_time: Optional[str] = None,
inference_specification: Optional[AlgorithmInferenceSpecificationArgs] = None,
product_id: Optional[str] = None,
region: Optional[str] = None,
tags: Optional[Mapping[str, str]] = None,
tags_all: Optional[Mapping[str, str]] = None,
timeouts: Optional[AlgorithmTimeoutsArgs] = None,
training_specification: Optional[AlgorithmTrainingSpecificationArgs] = None,
validation_specification: Optional[AlgorithmValidationSpecificationArgs] = None) -> Algorithm

func GetAlgorithm(ctx *Context, name string, id IDInput, state *AlgorithmState, opts ...ResourceOption) (*Algorithm, error)

public static Algorithm Get(string name, Input<string> id, AlgorithmState? state, CustomResourceOptions? opts = null)

public static Algorithm get(String name, Output<String> id, AlgorithmState state, CustomResourceOptions options)

resources:
  _:
    type: aws:sagemaker:Algorithm
    get:
      id: ${id}

- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- resource_name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- Algorithm
Description string - Description of the algorithm.
- Algorithm
Name string - Name of the algorithm.
- Algorithm
Status string - Status of the algorithm.
- Arn string
- ARN of the algorithm.
- Certify
For boolMarketplace - Whether to certify the algorithm for AWS Marketplace.
- Creation
Time string - Time when the algorithm was created, in RFC3339 format.
- Inference
Specification AlgorithmInference Specification - Configuration for inference jobs that use this algorithm. See Inference Specification.
- Product
Id string - AWS Marketplace product ID associated with the algorithm.
- Region string
- Region where this resource is managed. Defaults to the Region set in the provider configuration.
- Dictionary<string, string>
- Map of tags to assign to the resource.
- Dictionary<string, string>
- Map of tags assigned to the resource, including tags inherited from the provider
defaultTags configuration block. - Timeouts
Algorithm
Timeouts - Training
Specification AlgorithmTraining Specification - Configuration for training jobs that use this algorithm. See Training Specification.
- Validation
Specification AlgorithmValidation Specification - Configuration used to validate the algorithm. See Validation Specification.
- Algorithm
Description string - Description of the algorithm.
- Algorithm
Name string - Name of the algorithm.
- Algorithm
Status string - Status of the algorithm.
- Arn string
- ARN of the algorithm.
- Certify
For boolMarketplace - Whether to certify the algorithm for AWS Marketplace.
- Creation
Time string - Time when the algorithm was created, in RFC3339 format.
- Inference
Specification AlgorithmInference Specification Args - Configuration for inference jobs that use this algorithm. See Inference Specification.
- Product
Id string - AWS Marketplace product ID associated with the algorithm.
- Region string
- Region where this resource is managed. Defaults to the Region set in the provider configuration.
- map[string]string
- Map of tags to assign to the resource.
- map[string]string
- Map of tags assigned to the resource, including tags inherited from the provider
defaultTags configuration block. - Timeouts
Algorithm
Timeouts Args - Training
Specification AlgorithmTraining Specification Args - Configuration for training jobs that use this algorithm. See Training Specification.
- Validation
Specification AlgorithmValidation Specification Args - Configuration used to validate the algorithm. See Validation Specification.
- algorithm
Description String - Description of the algorithm.
- algorithm
Name String - Name of the algorithm.
- algorithm
Status String - Status of the algorithm.
- arn String
- ARN of the algorithm.
- certify
For BooleanMarketplace - Whether to certify the algorithm for AWS Marketplace.
- creation
Time String - Time when the algorithm was created, in RFC3339 format.
- inference
Specification AlgorithmInference Specification - Configuration for inference jobs that use this algorithm. See Inference Specification.
- product
Id String - AWS Marketplace product ID associated with the algorithm.
- region String
- Region where this resource is managed. Defaults to the Region set in the provider configuration.
- Map<String,String>
- Map of tags to assign to the resource.
- Map<String,String>
- Map of tags assigned to the resource, including tags inherited from the provider
defaultTags configuration block. - timeouts
Algorithm
Timeouts - training
Specification AlgorithmTraining Specification - Configuration for training jobs that use this algorithm. See Training Specification.
- validation
Specification AlgorithmValidation Specification - Configuration used to validate the algorithm. See Validation Specification.
- algorithm
Description string - Description of the algorithm.
- algorithm
Name string - Name of the algorithm.
- algorithm
Status string - Status of the algorithm.
- arn string
- ARN of the algorithm.
- certify
For booleanMarketplace - Whether to certify the algorithm for AWS Marketplace.
- creation
Time string - Time when the algorithm was created, in RFC3339 format.
- inference
Specification AlgorithmInference Specification - Configuration for inference jobs that use this algorithm. See Inference Specification.
- product
Id string - AWS Marketplace product ID associated with the algorithm.
- region string
- Region where this resource is managed. Defaults to the Region set in the provider configuration.
- {[key: string]: string}
- Map of tags to assign to the resource.
- {[key: string]: string}
- Map of tags assigned to the resource, including tags inherited from the provider
defaultTags configuration block. - timeouts
Algorithm
Timeouts - training
Specification AlgorithmTraining Specification - Configuration for training jobs that use this algorithm. See Training Specification.
- validation
Specification AlgorithmValidation Specification - Configuration used to validate the algorithm. See Validation Specification.
- algorithm_
description str - Description of the algorithm.
- algorithm_
name str - Name of the algorithm.
- algorithm_
status str - Status of the algorithm.
- arn str
- ARN of the algorithm.
- certify_
for_ boolmarketplace - Whether to certify the algorithm for AWS Marketplace.
- creation_
time str - Time when the algorithm was created, in RFC3339 format.
- inference_
specification AlgorithmInference Specification Args - Configuration for inference jobs that use this algorithm. See Inference Specification.
- product_
id str - AWS Marketplace product ID associated with the algorithm.
- region str
- Region where this resource is managed. Defaults to the Region set in the provider configuration.
- Mapping[str, str]
- Map of tags to assign to the resource.
- Mapping[str, str]
- Map of tags assigned to the resource, including tags inherited from the provider
defaultTags configuration block. - timeouts
Algorithm
Timeouts Args - training_
specification AlgorithmTraining Specification Args - Configuration for training jobs that use this algorithm. See Training Specification.
- validation_
specification AlgorithmValidation Specification Args - Configuration used to validate the algorithm. See Validation Specification.
- algorithm
Description String - Description of the algorithm.
- algorithm
Name String - Name of the algorithm.
- algorithm
Status String - Status of the algorithm.
- arn String
- ARN of the algorithm.
- certify
For BooleanMarketplace - Whether to certify the algorithm for AWS Marketplace.
- creation
Time String - Time when the algorithm was created, in RFC3339 format.
- inference
Specification Property Map - Configuration for inference jobs that use this algorithm. See Inference Specification.
- product
Id String - AWS Marketplace product ID associated with the algorithm.
- region String
- Region where this resource is managed. Defaults to the Region set in the provider configuration.
- Map<String>
- Map of tags to assign to the resource.
- Map<String>
- Map of tags assigned to the resource, including tags inherited from the provider
defaultTags configuration block. - timeouts Property Map
- training
Specification Property Map - Configuration for training jobs that use this algorithm. See Training Specification.
- validation
Specification Property Map - Configuration used to validate the algorithm. See Validation Specification.
Supporting Types
AlgorithmInferenceSpecification, AlgorithmInferenceSpecificationArgs
- Containers
List<Algorithm
Inference Specification Container> - List of container definitions for inference.
- Supported
Content List<string>Types - Supported MIME types for inference requests.
- Supported
Realtime List<string>Inference Instance Types - Instance types supported for real-time inference.
- Supported
Response List<string>Mime Types - Supported MIME types for inference responses.
- Supported
Transform List<string>Instance Types - Instance types supported for batch transform.
- Containers
[]Algorithm
Inference Specification Container - List of container definitions for inference.
- Supported
Content []stringTypes - Supported MIME types for inference requests.
- Supported
Realtime []stringInference Instance Types - Instance types supported for real-time inference.
- Supported
Response []stringMime Types - Supported MIME types for inference responses.
- Supported
Transform []stringInstance Types - Instance types supported for batch transform.
- containers
List<Algorithm
Inference Specification Container> - List of container definitions for inference.
- supported
Content List<String>Types - Supported MIME types for inference requests.
- supported
Realtime List<String>Inference Instance Types - Instance types supported for real-time inference.
- supported
Response List<String>Mime Types - Supported MIME types for inference responses.
- supported
Transform List<String>Instance Types - Instance types supported for batch transform.
- containers
Algorithm
Inference Specification Container[] - List of container definitions for inference.
- supported
Content string[]Types - Supported MIME types for inference requests.
- supported
Realtime string[]Inference Instance Types - Instance types supported for real-time inference.
- supported
Response string[]Mime Types - Supported MIME types for inference responses.
- supported
Transform string[]Instance Types - Instance types supported for batch transform.
- containers
Sequence[Algorithm
Inference Specification Container] - List of container definitions for inference.
- supported_
content_ Sequence[str]types - Supported MIME types for inference requests.
- supported_
realtime_ Sequence[str]inference_ instance_ types - Instance types supported for real-time inference.
- supported_
response_ Sequence[str]mime_ types - Supported MIME types for inference responses.
- supported_
transform_ Sequence[str]instance_ types - Instance types supported for batch transform.
- containers List<Property Map>
- List of container definitions for inference.
- supported
Content List<String>Types - Supported MIME types for inference requests.
- supported
Realtime List<String>Inference Instance Types - Instance types supported for real-time inference.
- supported
Response List<String>Mime Types - Supported MIME types for inference responses.
- supported
Transform List<String>Instance Types - Instance types supported for batch transform.
AlgorithmInferenceSpecificationContainer, AlgorithmInferenceSpecificationContainerArgs
- Additional
S3Data AlgorithmSource Inference Specification Container Additional S3Data Source - Additional model data to make available to the container. See Additional S3 Data Source.
- Base
Model AlgorithmInference Specification Container Base Model - Base model information for the container. See Base Model.
- Container
Hostname string - DNS host name for the container.
- Environment Dictionary<string, string>
- Environment variables to pass to the container.
- Framework string
- Machine learning framework in the container image.
- Framework
Version string - Framework version in the container image.
- Image string
- Container image URI.
- Image
Digest string - Digest of the container image.
- Is
Checkpoint bool - Whether the container is used as a checkpoint container.
- Model
Data stringEtag - ETag for
modelDataUrl. - Model
Data AlgorithmSource Inference Specification Container Model Data Source - Source of model data for the container. See Model Data Source.
- Model
Data stringUrl - S3 or HTTPS URL of the model artifacts.
- Model
Input AlgorithmInference Specification Container Model Input - Additional model input configuration. See Model Input.
- Nearest
Model stringName - Name of a pre-existing model nearest to the one being created.
- Product
Id string - AWS Marketplace product ID.
- Additional
S3Data AlgorithmSource Inference Specification Container Additional S3Data Source - Additional model data to make available to the container. See Additional S3 Data Source.
- Base
Model AlgorithmInference Specification Container Base Model - Base model information for the container. See Base Model.
- Container
Hostname string - DNS host name for the container.
- Environment map[string]string
- Environment variables to pass to the container.
- Framework string
- Machine learning framework in the container image.
- Framework
Version string - Framework version in the container image.
- Image string
- Container image URI.
- Image
Digest string - Digest of the container image.
- Is
Checkpoint bool - Whether the container is used as a checkpoint container.
- Model
Data stringEtag - ETag for
modelDataUrl. - Model
Data AlgorithmSource Inference Specification Container Model Data Source - Source of model data for the container. See Model Data Source.
- Model
Data stringUrl - S3 or HTTPS URL of the model artifacts.
- Model
Input AlgorithmInference Specification Container Model Input - Additional model input configuration. See Model Input.
- Nearest
Model stringName - Name of a pre-existing model nearest to the one being created.
- Product
Id string - AWS Marketplace product ID.
- additional
S3Data AlgorithmSource Inference Specification Container Additional S3Data Source - Additional model data to make available to the container. See Additional S3 Data Source.
- base
Model AlgorithmInference Specification Container Base Model - Base model information for the container. See Base Model.
- container
Hostname String - DNS host name for the container.
- environment Map<String,String>
- Environment variables to pass to the container.
- framework String
- Machine learning framework in the container image.
- framework
Version String - Framework version in the container image.
- image String
- Container image URI.
- image
Digest String - Digest of the container image.
- is
Checkpoint Boolean - Whether the container is used as a checkpoint container.
- model
Data StringEtag - ETag for
modelDataUrl. - model
Data AlgorithmSource Inference Specification Container Model Data Source - Source of model data for the container. See Model Data Source.
- model
Data StringUrl - S3 or HTTPS URL of the model artifacts.
- model
Input AlgorithmInference Specification Container Model Input - Additional model input configuration. See Model Input.
- nearest
Model StringName - Name of a pre-existing model nearest to the one being created.
- product
Id String - AWS Marketplace product ID.
- additional
S3Data AlgorithmSource Inference Specification Container Additional S3Data Source - Additional model data to make available to the container. See Additional S3 Data Source.
- base
Model AlgorithmInference Specification Container Base Model - Base model information for the container. See Base Model.
- container
Hostname string - DNS host name for the container.
- environment {[key: string]: string}
- Environment variables to pass to the container.
- framework string
- Machine learning framework in the container image.
- framework
Version string - Framework version in the container image.
- image string
- Container image URI.
- image
Digest string - Digest of the container image.
- is
Checkpoint boolean - Whether the container is used as a checkpoint container.
- model
Data stringEtag - ETag for
modelDataUrl. - model
Data AlgorithmSource Inference Specification Container Model Data Source - Source of model data for the container. See Model Data Source.
- model
Data stringUrl - S3 or HTTPS URL of the model artifacts.
- model
Input AlgorithmInference Specification Container Model Input - Additional model input configuration. See Model Input.
- nearest
Model stringName - Name of a pre-existing model nearest to the one being created.
- product
Id string - AWS Marketplace product ID.
- additional_
s3_ Algorithmdata_ source Inference Specification Container Additional S3Data Source - Additional model data to make available to the container. See Additional S3 Data Source.
- base_
model AlgorithmInference Specification Container Base Model - Base model information for the container. See Base Model.
- container_
hostname str - DNS host name for the container.
- environment Mapping[str, str]
- Environment variables to pass to the container.
- framework str
- Machine learning framework in the container image.
- framework_
version str - Framework version in the container image.
- image str
- Container image URI.
- image_
digest str - Digest of the container image.
- is_
checkpoint bool - Whether the container is used as a checkpoint container.
- model_
data_ stretag - ETag for
modelDataUrl. - model_
data_ Algorithmsource Inference Specification Container Model Data Source - Source of model data for the container. See Model Data Source.
- model_
data_ strurl - S3 or HTTPS URL of the model artifacts.
- model_
input AlgorithmInference Specification Container Model Input - Additional model input configuration. See Model Input.
- nearest_
model_ strname - Name of a pre-existing model nearest to the one being created.
- product_
id str - AWS Marketplace product ID.
- additional
S3Data Property MapSource - Additional model data to make available to the container. See Additional S3 Data Source.
- base
Model Property Map - Base model information for the container. See Base Model.
- container
Hostname String - DNS host name for the container.
- environment Map<String>
- Environment variables to pass to the container.
- framework String
- Machine learning framework in the container image.
- framework
Version String - Framework version in the container image.
- image String
- Container image URI.
- image
Digest String - Digest of the container image.
- is
Checkpoint Boolean - Whether the container is used as a checkpoint container.
- model
Data StringEtag - ETag for
modelDataUrl. - model
Data Property MapSource - Source of model data for the container. See Model Data Source.
- model
Data StringUrl - S3 or HTTPS URL of the model artifacts.
- model
Input Property Map - Additional model input configuration. See Model Input.
- nearest
Model StringName - Name of a pre-existing model nearest to the one being created.
- product
Id String - AWS Marketplace product ID.
AlgorithmInferenceSpecificationContainerAdditionalS3DataSource, AlgorithmInferenceSpecificationContainerAdditionalS3DataSourceArgs
- S3Data
Type string - Type of additional S3 data.
- S3Uri string
- S3 or HTTPS URI for the additional data.
- Compression
Type string - Compression type for the data. Allowed values are:
None and Gzip. - Etag string
- ETag of the S3 object.
- S3Data
Type string - Type of additional S3 data.
- S3Uri string
- S3 or HTTPS URI for the additional data.
- Compression
Type string - Compression type for the data. Allowed values are:
None and Gzip. - Etag string
- ETag of the S3 object.
- s3Data
Type String - Type of additional S3 data.
- s3Uri String
- S3 or HTTPS URI for the additional data.
- compression
Type String - Compression type for the data. Allowed values are:
None and Gzip. - etag String
- ETag of the S3 object.
- s3Data
Type string - Type of additional S3 data.
- s3Uri string
- S3 or HTTPS URI for the additional data.
- compression
Type string - Compression type for the data. Allowed values are:
None and Gzip. - etag string
- ETag of the S3 object.
- s3_
data_ strtype - Type of additional S3 data.
- s3_
uri str - S3 or HTTPS URI for the additional data.
- compression_
type str - Compression type for the data. Allowed values are:
None and Gzip. - etag str
- ETag of the S3 object.
- s3Data
Type String - Type of additional S3 data.
- s3Uri String
- S3 or HTTPS URI for the additional data.
- compression
Type String - Compression type for the data. Allowed values are:
None and Gzip. - etag String
- ETag of the S3 object.
AlgorithmInferenceSpecificationContainerBaseModel, AlgorithmInferenceSpecificationContainerBaseModelArgs
- Hub
Content stringName - Name of the SageMaker AI Hub content.
- Hub
Content stringVersion - Version of the SageMaker AI Hub content.
- Recipe
Name string - Recipe name associated with the base model.
- Hub
Content stringName - Name of the SageMaker AI Hub content.
- Hub
Content stringVersion - Version of the SageMaker AI Hub content.
- Recipe
Name string - Recipe name associated with the base model.
- hub
Content StringName - Name of the SageMaker AI Hub content.
- hub
Content StringVersion - Version of the SageMaker AI Hub content.
- recipe
Name String - Recipe name associated with the base model.
- hub
Content stringName - Name of the SageMaker AI Hub content.
- hub
Content stringVersion - Version of the SageMaker AI Hub content.
- recipe
Name string - Recipe name associated with the base model.
- hub_
content_ strname - Name of the SageMaker AI Hub content.
- hub_
content_ strversion - Version of the SageMaker AI Hub content.
- recipe_
name str - Recipe name associated with the base model.
- hub
Content StringName - Name of the SageMaker AI Hub content.
- hub
Content StringVersion - Version of the SageMaker AI Hub content.
- recipe
Name String - Recipe name associated with the base model.
AlgorithmInferenceSpecificationContainerModelDataSource, AlgorithmInferenceSpecificationContainerModelDataSourceArgs
- S3Data
Source AlgorithmInference Specification Container Model Data Source S3Data Source - S3-backed model data source. See Model Data Source S3 Data Source.
- S3Data
Source AlgorithmInference Specification Container Model Data Source S3Data Source - S3-backed model data source. See Model Data Source S3 Data Source.
- s3Data
Source AlgorithmInference Specification Container Model Data Source S3Data Source - S3-backed model data source. See Model Data Source S3 Data Source.
- s3Data
Source AlgorithmInference Specification Container Model Data Source S3Data Source - S3-backed model data source. See Model Data Source S3 Data Source.
- s3_
data_ Algorithmsource Inference Specification Container Model Data Source S3Data Source - S3-backed model data source. See Model Data Source S3 Data Source.
- s3Data
Source Property Map - S3-backed model data source. See Model Data Source S3 Data Source.
AlgorithmInferenceSpecificationContainerModelDataSourceS3DataSource, AlgorithmInferenceSpecificationContainerModelDataSourceS3DataSourceArgs
- CompressionType string
- S3DataType string
- S3Uri string
- Etag string
- HubAccessConfig AlgorithmInferenceSpecificationContainerModelDataSourceS3DataSourceHubAccessConfig
- ManifestEtag string - ETag of the manifest file.
- ManifestS3Uri string - S3 or HTTPS URI of the manifest file.
- ModelAccessConfig AlgorithmInferenceSpecificationContainerModelDataSourceS3DataSourceModelAccessConfig
- CompressionType string
- S3DataType string
- S3Uri string
- Etag string
- HubAccessConfig AlgorithmInferenceSpecificationContainerModelDataSourceS3DataSourceHubAccessConfig
- ManifestEtag string - ETag of the manifest file.
- ManifestS3Uri string - S3 or HTTPS URI of the manifest file.
- ModelAccessConfig AlgorithmInferenceSpecificationContainerModelDataSourceS3DataSourceModelAccessConfig
- compressionType String
- s3DataType String
- s3Uri String
- etag String
- hubAccessConfig AlgorithmInferenceSpecificationContainerModelDataSourceS3DataSourceHubAccessConfig
- manifestEtag String - ETag of the manifest file.
- manifestS3Uri String - S3 or HTTPS URI of the manifest file.
- modelAccessConfig AlgorithmInferenceSpecificationContainerModelDataSourceS3DataSourceModelAccessConfig
- compressionType string
- s3DataType string
- s3Uri string
- etag string
- hubAccessConfig AlgorithmInferenceSpecificationContainerModelDataSourceS3DataSourceHubAccessConfig
- manifestEtag string - ETag of the manifest file.
- manifestS3Uri string - S3 or HTTPS URI of the manifest file.
- modelAccessConfig AlgorithmInferenceSpecificationContainerModelDataSourceS3DataSourceModelAccessConfig
- compression_type str
- s3_data_type str
- s3_uri str
- etag str
- hub_access_config AlgorithmInferenceSpecificationContainerModelDataSourceS3DataSourceHubAccessConfig
- manifest_etag str - ETag of the manifest file.
- manifest_s3_uri str - S3 or HTTPS URI of the manifest file.
- model_access_config AlgorithmInferenceSpecificationContainerModelDataSourceS3DataSourceModelAccessConfig
- compressionType String
- s3DataType String
- s3Uri String
- etag String
- hubAccessConfig Property Map
- manifestEtag String - ETag of the manifest file.
- manifestS3Uri String - S3 or HTTPS URI of the manifest file.
- modelAccessConfig Property Map
AlgorithmInferenceSpecificationContainerModelDataSourceS3DataSourceHubAccessConfig, AlgorithmInferenceSpecificationContainerModelDataSourceS3DataSourceHubAccessConfigArgs
- HubContentArn string - ARN of the SageMaker AI Hub content.
- HubContentArn string - ARN of the SageMaker AI Hub content.
- hubContentArn String - ARN of the SageMaker AI Hub content.
- hubContentArn string - ARN of the SageMaker AI Hub content.
- hub_content_arn str - ARN of the SageMaker AI Hub content.
- hubContentArn String - ARN of the SageMaker AI Hub content.
AlgorithmInferenceSpecificationContainerModelDataSourceS3DataSourceModelAccessConfig, AlgorithmInferenceSpecificationContainerModelDataSourceS3DataSourceModelAccessConfigArgs
- AcceptEula bool - Whether to accept the model end-user license agreement.
- AcceptEula bool - Whether to accept the model end-user license agreement.
- acceptEula Boolean - Whether to accept the model end-user license agreement.
- acceptEula boolean - Whether to accept the model end-user license agreement.
- accept_eula bool - Whether to accept the model end-user license agreement.
- acceptEula Boolean - Whether to accept the model end-user license agreement.
AlgorithmInferenceSpecificationContainerModelInput, AlgorithmInferenceSpecificationContainerModelInputArgs
- DataInputConfig string - Input configuration for the model.
- DataInputConfig string - Input configuration for the model.
- dataInputConfig String - Input configuration for the model.
- dataInputConfig string - Input configuration for the model.
- data_input_config str - Input configuration for the model.
- dataInputConfig String - Input configuration for the model.
AlgorithmTimeouts, AlgorithmTimeoutsArgs
- Create string
- A string that can be parsed as a duration consisting of numbers and unit suffixes, such as "30s" or "2h45m". Valid time units are "s" (seconds), "m" (minutes), "h" (hours).
- Delete string
- A string that can be parsed as a duration consisting of numbers and unit suffixes, such as "30s" or "2h45m". Valid time units are "s" (seconds), "m" (minutes), "h" (hours). Setting a timeout for a Delete operation is only applicable if changes are saved into state before the destroy operation occurs.
- Create string
- A string that can be parsed as a duration consisting of numbers and unit suffixes, such as "30s" or "2h45m". Valid time units are "s" (seconds), "m" (minutes), "h" (hours).
- Delete string
- A string that can be parsed as a duration consisting of numbers and unit suffixes, such as "30s" or "2h45m". Valid time units are "s" (seconds), "m" (minutes), "h" (hours). Setting a timeout for a Delete operation is only applicable if changes are saved into state before the destroy operation occurs.
- create String
- A string that can be parsed as a duration consisting of numbers and unit suffixes, such as "30s" or "2h45m". Valid time units are "s" (seconds), "m" (minutes), "h" (hours).
- delete String
- A string that can be parsed as a duration consisting of numbers and unit suffixes, such as "30s" or "2h45m". Valid time units are "s" (seconds), "m" (minutes), "h" (hours). Setting a timeout for a Delete operation is only applicable if changes are saved into state before the destroy operation occurs.
- create string
- A string that can be parsed as a duration consisting of numbers and unit suffixes, such as "30s" or "2h45m". Valid time units are "s" (seconds), "m" (minutes), "h" (hours).
- delete string
- A string that can be parsed as a duration consisting of numbers and unit suffixes, such as "30s" or "2h45m". Valid time units are "s" (seconds), "m" (minutes), "h" (hours). Setting a timeout for a Delete operation is only applicable if changes are saved into state before the destroy operation occurs.
- create str
- A string that can be parsed as a duration consisting of numbers and unit suffixes, such as "30s" or "2h45m". Valid time units are "s" (seconds), "m" (minutes), "h" (hours).
- delete str
- A string that can be parsed as a duration consisting of numbers and unit suffixes, such as "30s" or "2h45m". Valid time units are "s" (seconds), "m" (minutes), "h" (hours). Setting a timeout for a Delete operation is only applicable if changes are saved into state before the destroy operation occurs.
- create String
- A string that can be parsed as a duration consisting of numbers and unit suffixes, such as "30s" or "2h45m". Valid time units are "s" (seconds), "m" (minutes), "h" (hours).
- delete String
- A string that can be parsed as a duration consisting of numbers and unit suffixes, such as "30s" or "2h45m". Valid time units are "s" (seconds), "m" (minutes), "h" (hours). Setting a timeout for a Delete operation is only applicable if changes are saved into state before the destroy operation occurs.
AlgorithmTrainingSpecification, AlgorithmTrainingSpecificationArgs
- SupportedTrainingInstanceTypes List&lt;string&gt; - Instance types supported for training.
- TrainingChannels List&lt;AlgorithmTrainingSpecificationTrainingChannel&gt; - List of channel definitions supported for training. See Training Channels.
- TrainingImage string - Training image URI.
- AdditionalS3DataSource AlgorithmTrainingSpecificationAdditionalS3DataSource - Additional training data to make available to the algorithm. See Additional S3 Data Source.
- MetricDefinitions List&lt;AlgorithmTrainingSpecificationMetricDefinition&gt; - List of metric definitions used to parse training logs. See Metric Definitions.
- SupportedHyperParameters List&lt;AlgorithmTrainingSpecificationSupportedHyperParameter&gt; - Hyperparameter definitions supported by the algorithm. See Supported Hyper Parameters.
- SupportedTuningJobObjectiveMetrics List&lt;AlgorithmTrainingSpecificationSupportedTuningJobObjectiveMetric&gt; - Objective metrics supported for hyperparameter tuning jobs. See Supported Tuning Job Objective Metrics.
- SupportsDistributedTraining bool - Whether the algorithm supports distributed training.
- TrainingImageDigest string - Digest of the training image.
- SupportedTrainingInstanceTypes []string - Instance types supported for training.
- TrainingChannels []AlgorithmTrainingSpecificationTrainingChannel - List of channel definitions supported for training. See Training Channels.
- TrainingImage string - Training image URI.
- AdditionalS3DataSource AlgorithmTrainingSpecificationAdditionalS3DataSource - Additional training data to make available to the algorithm. See Additional S3 Data Source.
- MetricDefinitions []AlgorithmTrainingSpecificationMetricDefinition - List of metric definitions used to parse training logs. See Metric Definitions.
- SupportedHyperParameters []AlgorithmTrainingSpecificationSupportedHyperParameter - Hyperparameter definitions supported by the algorithm. See Supported Hyper Parameters.
- SupportedTuningJobObjectiveMetrics []AlgorithmTrainingSpecificationSupportedTuningJobObjectiveMetric - Objective metrics supported for hyperparameter tuning jobs. See Supported Tuning Job Objective Metrics.
- SupportsDistributedTraining bool - Whether the algorithm supports distributed training.
- TrainingImageDigest string - Digest of the training image.
- supportedTrainingInstanceTypes List&lt;String&gt; - Instance types supported for training.
- trainingChannels List&lt;AlgorithmTrainingSpecificationTrainingChannel&gt; - List of channel definitions supported for training. See Training Channels.
- trainingImage String - Training image URI.
- additionalS3DataSource AlgorithmTrainingSpecificationAdditionalS3DataSource - Additional training data to make available to the algorithm. See Additional S3 Data Source.
- metricDefinitions List&lt;AlgorithmTrainingSpecificationMetricDefinition&gt; - List of metric definitions used to parse training logs. See Metric Definitions.
- supportedHyperParameters List&lt;AlgorithmTrainingSpecificationSupportedHyperParameter&gt; - Hyperparameter definitions supported by the algorithm. See Supported Hyper Parameters.
- supportedTuningJobObjectiveMetrics List&lt;AlgorithmTrainingSpecificationSupportedTuningJobObjectiveMetric&gt; - Objective metrics supported for hyperparameter tuning jobs. See Supported Tuning Job Objective Metrics.
- supportsDistributedTraining Boolean - Whether the algorithm supports distributed training.
- trainingImageDigest String - Digest of the training image.
- supportedTrainingInstanceTypes string[] - Instance types supported for training.
- trainingChannels AlgorithmTrainingSpecificationTrainingChannel[] - List of channel definitions supported for training. See Training Channels.
- trainingImage string - Training image URI.
- additionalS3DataSource AlgorithmTrainingSpecificationAdditionalS3DataSource - Additional training data to make available to the algorithm. See Additional S3 Data Source.
- metricDefinitions AlgorithmTrainingSpecificationMetricDefinition[] - List of metric definitions used to parse training logs. See Metric Definitions.
- supportedHyperParameters AlgorithmTrainingSpecificationSupportedHyperParameter[] - Hyperparameter definitions supported by the algorithm. See Supported Hyper Parameters.
- supportedTuningJobObjectiveMetrics AlgorithmTrainingSpecificationSupportedTuningJobObjectiveMetric[] - Objective metrics supported for hyperparameter tuning jobs. See Supported Tuning Job Objective Metrics.
- supportsDistributedTraining boolean - Whether the algorithm supports distributed training.
- trainingImageDigest string - Digest of the training image.
- supported_training_instance_types Sequence[str] - Instance types supported for training.
- training_channels Sequence[AlgorithmTrainingSpecificationTrainingChannel] - List of channel definitions supported for training. See Training Channels.
- training_image str - Training image URI.
- additional_s3_data_source AlgorithmTrainingSpecificationAdditionalS3DataSource - Additional training data to make available to the algorithm. See Additional S3 Data Source.
- metric_definitions Sequence[AlgorithmTrainingSpecificationMetricDefinition] - List of metric definitions used to parse training logs. See Metric Definitions.
- supported_hyper_parameters Sequence[AlgorithmTrainingSpecificationSupportedHyperParameter] - Hyperparameter definitions supported by the algorithm. See Supported Hyper Parameters.
- supported_tuning_job_objective_metrics Sequence[AlgorithmTrainingSpecificationSupportedTuningJobObjectiveMetric] - Objective metrics supported for hyperparameter tuning jobs. See Supported Tuning Job Objective Metrics.
- supports_distributed_training bool - Whether the algorithm supports distributed training.
- training_image_digest str - Digest of the training image.
- supportedTrainingInstanceTypes List&lt;String&gt; - Instance types supported for training.
- trainingChannels List&lt;Property Map&gt; - List of channel definitions supported for training. See Training Channels.
- trainingImage String - Training image URI.
- additionalS3DataSource Property Map - Additional training data to make available to the algorithm. See Additional S3 Data Source.
- metricDefinitions List&lt;Property Map&gt; - List of metric definitions used to parse training logs. See Metric Definitions.
- supportedHyperParameters List&lt;Property Map&gt; - Hyperparameter definitions supported by the algorithm. See Supported Hyper Parameters.
- supportedTuningJobObjectiveMetrics List&lt;Property Map&gt; - Objective metrics supported for hyperparameter tuning jobs. See Supported Tuning Job Objective Metrics.
- supportsDistributedTraining Boolean - Whether the algorithm supports distributed training.
- trainingImageDigest String - Digest of the training image.
AlgorithmTrainingSpecificationAdditionalS3DataSource, AlgorithmTrainingSpecificationAdditionalS3DataSourceArgs
- S3DataType string - Type of additional S3 data.
- S3Uri string - S3 or HTTPS URI for the additional data.
- CompressionType string - Compression type for the data. Allowed values are: None and Gzip.
- Etag string - ETag of the S3 object.
- S3DataType string - Type of additional S3 data.
- S3Uri string - S3 or HTTPS URI for the additional data.
- CompressionType string - Compression type for the data. Allowed values are: None and Gzip.
- Etag string - ETag of the S3 object.
- s3DataType String - Type of additional S3 data.
- s3Uri String - S3 or HTTPS URI for the additional data.
- compressionType String - Compression type for the data. Allowed values are: None and Gzip.
- etag String - ETag of the S3 object.
- s3DataType string - Type of additional S3 data.
- s3Uri string - S3 or HTTPS URI for the additional data.
- compressionType string - Compression type for the data. Allowed values are: None and Gzip.
- etag string - ETag of the S3 object.
- s3_data_type str - Type of additional S3 data.
- s3_uri str - S3 or HTTPS URI for the additional data.
- compression_type str - Compression type for the data. Allowed values are: None and Gzip.
- etag str - ETag of the S3 object.
- s3DataType String - Type of additional S3 data.
- s3Uri String - S3 or HTTPS URI for the additional data.
- compressionType String - Compression type for the data. Allowed values are: None and Gzip.
- etag String - ETag of the S3 object.
AlgorithmTrainingSpecificationMetricDefinition, AlgorithmTrainingSpecificationMetricDefinitionArgs
AlgorithmTrainingSpecificationSupportedHyperParameter, AlgorithmTrainingSpecificationSupportedHyperParameterArgs
- Name string - Hyperparameter name.
- Type string - Hyperparameter type. Allowed values are: Integer, Continuous, Categorical, and FreeText.
- DefaultValue string - Default value for the hyperparameter.
- Description string - Description of the hyperparameter.
- IsRequired bool - Whether the hyperparameter is required.
- IsTunable bool - Whether the hyperparameter can be tuned.
- Range AlgorithmTrainingSpecificationSupportedHyperParameterRange - Allowed value range for the hyperparameter. See Parameter Range.
- Name string - Hyperparameter name.
- Type string - Hyperparameter type. Allowed values are: Integer, Continuous, Categorical, and FreeText.
- DefaultValue string - Default value for the hyperparameter.
- Description string - Description of the hyperparameter.
- IsRequired bool - Whether the hyperparameter is required.
- IsTunable bool - Whether the hyperparameter can be tuned.
- Range AlgorithmTrainingSpecificationSupportedHyperParameterRange - Allowed value range for the hyperparameter. See Parameter Range.
- name String - Hyperparameter name.
- type String - Hyperparameter type. Allowed values are: Integer, Continuous, Categorical, and FreeText.
- defaultValue String - Default value for the hyperparameter.
- description String - Description of the hyperparameter.
- isRequired Boolean - Whether the hyperparameter is required.
- isTunable Boolean - Whether the hyperparameter can be tuned.
- range AlgorithmTrainingSpecificationSupportedHyperParameterRange - Allowed value range for the hyperparameter. See Parameter Range.
- name string - Hyperparameter name.
- type string - Hyperparameter type. Allowed values are: Integer, Continuous, Categorical, and FreeText.
- defaultValue string - Default value for the hyperparameter.
- description string - Description of the hyperparameter.
- isRequired boolean - Whether the hyperparameter is required.
- isTunable boolean - Whether the hyperparameter can be tuned.
- range AlgorithmTrainingSpecificationSupportedHyperParameterRange - Allowed value range for the hyperparameter. See Parameter Range.
- name str - Hyperparameter name.
- type str - Hyperparameter type. Allowed values are: Integer, Continuous, Categorical, and FreeText.
- default_value str - Default value for the hyperparameter.
- description str - Description of the hyperparameter.
- is_required bool - Whether the hyperparameter is required.
- is_tunable bool - Whether the hyperparameter can be tuned.
- range AlgorithmTrainingSpecificationSupportedHyperParameterRange - Allowed value range for the hyperparameter. See Parameter Range.
- name String - Hyperparameter name.
- type String - Hyperparameter type. Allowed values are: Integer, Continuous, Categorical, and FreeText.
- defaultValue String - Default value for the hyperparameter.
- description String - Description of the hyperparameter.
- isRequired Boolean - Whether the hyperparameter is required.
- isTunable Boolean - Whether the hyperparameter can be tuned.
- range Property Map - Allowed value range for the hyperparameter. See Parameter Range.
AlgorithmTrainingSpecificationSupportedHyperParameterRange, AlgorithmTrainingSpecificationSupportedHyperParameterRangeArgs
- CategoricalParameterRangeSpecification AlgorithmTrainingSpecificationSupportedHyperParameterRangeCategoricalParameterRangeSpecification - Categorical range definition. See Categorical Parameter Range Specification.
- ContinuousParameterRangeSpecification AlgorithmTrainingSpecificationSupportedHyperParameterRangeContinuousParameterRangeSpecification - Continuous range definition. See Continuous Parameter Range Specification.
- IntegerParameterRangeSpecification AlgorithmTrainingSpecificationSupportedHyperParameterRangeIntegerParameterRangeSpecification - Integer range definition. See Integer Parameter Range Specification.
- CategoricalParameterRangeSpecification AlgorithmTrainingSpecificationSupportedHyperParameterRangeCategoricalParameterRangeSpecification - Categorical range definition. See Categorical Parameter Range Specification.
- ContinuousParameterRangeSpecification AlgorithmTrainingSpecificationSupportedHyperParameterRangeContinuousParameterRangeSpecification - Continuous range definition. See Continuous Parameter Range Specification.
- IntegerParameterRangeSpecification AlgorithmTrainingSpecificationSupportedHyperParameterRangeIntegerParameterRangeSpecification - Integer range definition. See Integer Parameter Range Specification.
- categoricalParameterRangeSpecification AlgorithmTrainingSpecificationSupportedHyperParameterRangeCategoricalParameterRangeSpecification - Categorical range definition. See Categorical Parameter Range Specification.
- continuousParameterRangeSpecification AlgorithmTrainingSpecificationSupportedHyperParameterRangeContinuousParameterRangeSpecification - Continuous range definition. See Continuous Parameter Range Specification.
- integerParameterRangeSpecification AlgorithmTrainingSpecificationSupportedHyperParameterRangeIntegerParameterRangeSpecification - Integer range definition. See Integer Parameter Range Specification.
- categoricalParameterRangeSpecification AlgorithmTrainingSpecificationSupportedHyperParameterRangeCategoricalParameterRangeSpecification - Categorical range definition. See Categorical Parameter Range Specification.
- continuousParameterRangeSpecification AlgorithmTrainingSpecificationSupportedHyperParameterRangeContinuousParameterRangeSpecification - Continuous range definition. See Continuous Parameter Range Specification.
- integerParameterRangeSpecification AlgorithmTrainingSpecificationSupportedHyperParameterRangeIntegerParameterRangeSpecification - Integer range definition. See Integer Parameter Range Specification.
- categorical_parameter_range_specification AlgorithmTrainingSpecificationSupportedHyperParameterRangeCategoricalParameterRangeSpecification - Categorical range definition. See Categorical Parameter Range Specification.
- continuous_parameter_range_specification AlgorithmTrainingSpecificationSupportedHyperParameterRangeContinuousParameterRangeSpecification - Continuous range definition. See Continuous Parameter Range Specification.
- integer_parameter_range_specification AlgorithmTrainingSpecificationSupportedHyperParameterRangeIntegerParameterRangeSpecification - Integer range definition. See Integer Parameter Range Specification.
- categoricalParameterRangeSpecification Property Map - Categorical range definition. See Categorical Parameter Range Specification.
- continuousParameterRangeSpecification Property Map - Continuous range definition. See Continuous Parameter Range Specification.
- integerParameterRangeSpecification Property Map - Integer range definition. See Integer Parameter Range Specification.
AlgorithmTrainingSpecificationSupportedHyperParameterRangeCategoricalParameterRangeSpecification, AlgorithmTrainingSpecificationSupportedHyperParameterRangeCategoricalParameterRangeSpecificationArgs
- Values List<string>
- Allowed categorical values.
- Values []string
- Allowed categorical values.
- values List<String>
- Allowed categorical values.
- values string[]
- Allowed categorical values.
- values Sequence[str]
- Allowed categorical values.
- values List<String>
- Allowed categorical values.
AlgorithmTrainingSpecificationSupportedHyperParameterRangeContinuousParameterRangeSpecification, AlgorithmTrainingSpecificationSupportedHyperParameterRangeContinuousParameterRangeSpecificationArgs
AlgorithmTrainingSpecificationSupportedHyperParameterRangeIntegerParameterRangeSpecification, AlgorithmTrainingSpecificationSupportedHyperParameterRangeIntegerParameterRangeSpecificationArgs
AlgorithmTrainingSpecificationSupportedTuningJobObjectiveMetric, AlgorithmTrainingSpecificationSupportedTuningJobObjectiveMetricArgs
- MetricName string - Metric name.
- Type string - Objective type. Allowed values are: Minimize and Maximize.
- MetricName string - Metric name.
- Type string - Objective type. Allowed values are: Minimize and Maximize.
- metricName String - Metric name.
- type String - Objective type. Allowed values are: Minimize and Maximize.
- metricName string - Metric name.
- type string - Objective type. Allowed values are: Minimize and Maximize.
- metric_name str - Metric name.
- type str - Objective type. Allowed values are: Minimize and Maximize.
- metricName String - Metric name.
- type String - Objective type. Allowed values are: Minimize and Maximize.
AlgorithmTrainingSpecificationTrainingChannel, AlgorithmTrainingSpecificationTrainingChannelArgs
- Name string - Channel name.
- SupportedContentTypes List&lt;string&gt; - Supported input content types.
- SupportedInputModes List&lt;string&gt; - Supported training input modes.
- Description string - Description of the channel.
- IsRequired bool - Whether the channel is required.
- SupportedCompressionTypes List&lt;string&gt; - Supported compression types. Allowed values are: None and Gzip.
- Name string - Channel name.
- SupportedContentTypes []string - Supported input content types.
- SupportedInputModes []string - Supported training input modes.
- Description string - Description of the channel.
- IsRequired bool - Whether the channel is required.
- SupportedCompressionTypes []string - Supported compression types. Allowed values are: None and Gzip.
- name String - Channel name.
- supportedContentTypes List&lt;String&gt; - Supported input content types.
- supportedInputModes List&lt;String&gt; - Supported training input modes.
- description String - Description of the channel.
- isRequired Boolean - Whether the channel is required.
- supportedCompressionTypes List&lt;String&gt; - Supported compression types. Allowed values are: None and Gzip.
- name string - Channel name.
- supportedContentTypes string[] - Supported input content types.
- supportedInputModes string[] - Supported training input modes.
- description string - Description of the channel.
- isRequired boolean - Whether the channel is required.
- supportedCompressionTypes string[] - Supported compression types. Allowed values are: None and Gzip.
- name str - Channel name.
- supported_content_types Sequence[str] - Supported input content types.
- supported_input_modes Sequence[str] - Supported training input modes.
- description str - Description of the channel.
- is_required bool - Whether the channel is required.
- supported_compression_types Sequence[str] - Supported compression types. Allowed values are: None and Gzip.
- name String - Channel name.
- supportedContentTypes List&lt;String&gt; - Supported input content types.
- supportedInputModes List&lt;String&gt; - Supported training input modes.
- description String - Description of the channel.
- isRequired Boolean - Whether the channel is required.
- supportedCompressionTypes List&lt;String&gt; - Supported compression types. Allowed values are: None and Gzip.
AlgorithmValidationSpecification, AlgorithmValidationSpecificationArgs
- ValidationProfiles AlgorithmValidationSpecificationValidationProfiles - Validation profiles for the algorithm. See Validation Profiles.
- ValidationRole string - IAM role ARN used for validation.
- ValidationProfiles AlgorithmValidationSpecificationValidationProfiles - Validation profiles for the algorithm. See Validation Profiles.
- ValidationRole string - IAM role ARN used for validation.
- validationProfiles AlgorithmValidationSpecificationValidationProfiles - Validation profiles for the algorithm. See Validation Profiles.
- validationRole String - IAM role ARN used for validation.
- validationProfiles AlgorithmValidationSpecificationValidationProfiles - Validation profiles for the algorithm. See Validation Profiles.
- validationRole string - IAM role ARN used for validation.
- validation_profiles AlgorithmValidationSpecificationValidationProfiles - Validation profiles for the algorithm. See Validation Profiles.
- validation_role str - IAM role ARN used for validation.
- validationProfiles Property Map - Validation profiles for the algorithm. See Validation Profiles.
- validationRole String - IAM role ARN used for validation.
AlgorithmValidationSpecificationValidationProfiles, AlgorithmValidationSpecificationValidationProfilesArgs
- ProfileName string - Profile name.
- TrainingJobDefinition AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinition - Training job definition used during validation. See Training Job Definition.
- TransformJobDefinition AlgorithmValidationSpecificationValidationProfilesTransformJobDefinition - Transform job definition used during validation. See Transform Job Definition.
- ProfileName string - Profile name.
- TrainingJobDefinition AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinition - Training job definition used during validation. See Training Job Definition.
- TransformJobDefinition AlgorithmValidationSpecificationValidationProfilesTransformJobDefinition - Transform job definition used during validation. See Transform Job Definition.
- profileName String - Profile name.
- trainingJobDefinition AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinition - Training job definition used during validation. See Training Job Definition.
- transformJobDefinition AlgorithmValidationSpecificationValidationProfilesTransformJobDefinition - Transform job definition used during validation. See Transform Job Definition.
- profileName string - Profile name.
- trainingJobDefinition AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinition - Training job definition used during validation. See Training Job Definition.
- transformJobDefinition AlgorithmValidationSpecificationValidationProfilesTransformJobDefinition - Transform job definition used during validation. See Transform Job Definition.
- profile_name str - Profile name.
- training_job_definition AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinition - Training job definition used during validation. See Training Job Definition.
- transform_job_definition AlgorithmValidationSpecificationValidationProfilesTransformJobDefinition - Transform job definition used during validation. See Transform Job Definition.
- profileName String - Profile name.
- trainingJobDefinition Property Map - Training job definition used during validation. See Training Job Definition.
- transformJobDefinition Property Map - Transform job definition used during validation. See Transform Job Definition.
AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinition, AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionArgs
- Input
Data List<AlgorithmConfigs Validation Specification Validation Profiles Training Job Definition Input Data Config> - Input channel configuration for the validation training job. See Input Data Config.
- Output
Data AlgorithmConfig Validation Specification Validation Profiles Training Job Definition Output Data Config - Output configuration for the validation training job. See Output Data Config.
- Resource
Config AlgorithmValidation Specification Validation Profiles Training Job Definition Resource Config - Resource configuration for the validation training job. See Resource Config.
- Stopping
Condition AlgorithmValidation Specification Validation Profiles Training Job Definition Stopping Condition - Stopping condition for the validation training job. See Stopping Condition.
- Training
Input stringMode - Input mode for the validation training job. Allowed values are:
Pipe,File, andFastFile. - Hyper
Parameters Dictionary<string, string> - Hyperparameters to pass to the training job.
- Input
Data []AlgorithmConfigs Validation Specification Validation Profiles Training Job Definition Input Data Config - Input channel configuration for the validation training job. See Input Data Config.
- Output
Data AlgorithmConfig Validation Specification Validation Profiles Training Job Definition Output Data Config - Output configuration for the validation training job. See Output Data Config.
- Resource
Config AlgorithmValidation Specification Validation Profiles Training Job Definition Resource Config - Resource configuration for the validation training job. See Resource Config.
- Stopping
Condition AlgorithmValidation Specification Validation Profiles Training Job Definition Stopping Condition - Stopping condition for the validation training job. See Stopping Condition.
- Training
Input stringMode - Input mode for the validation training job. Allowed values are:
Pipe,File, andFastFile. - Hyper
Parameters map[string]string - Hyperparameters to pass to the training job.
- input
Data List<AlgorithmConfigs Validation Specification Validation Profiles Training Job Definition Input Data Config> - Input channel configuration for the validation training job. See Input Data Config.
- output
Data AlgorithmConfig Validation Specification Validation Profiles Training Job Definition Output Data Config - Output configuration for the validation training job. See Output Data Config.
- resource
Config AlgorithmValidation Specification Validation Profiles Training Job Definition Resource Config - Resource configuration for the validation training job. See Resource Config.
- stopping
Condition AlgorithmValidation Specification Validation Profiles Training Job Definition Stopping Condition - Stopping condition for the validation training job. See Stopping Condition.
- training
Input StringMode - Input mode for the validation training job. Allowed values are:
Pipe,File, andFastFile. - hyper
Parameters Map<String,String> - Hyperparameters to pass to the training job.
- input
Data AlgorithmConfigs Validation Specification Validation Profiles Training Job Definition Input Data Config[] - Input channel configuration for the validation training job. See Input Data Config.
- output
Data AlgorithmConfig Validation Specification Validation Profiles Training Job Definition Output Data Config - Output configuration for the validation training job. See Output Data Config.
- resource
Config AlgorithmValidation Specification Validation Profiles Training Job Definition Resource Config - Resource configuration for the validation training job. See Resource Config.
- stopping
Condition AlgorithmValidation Specification Validation Profiles Training Job Definition Stopping Condition - Stopping condition for the validation training job. See Stopping Condition.
- training
Input stringMode - Input mode for the validation training job. Allowed values are:
Pipe, File, and FastFile. - hyper
Parameters {[key: string]: string} - Hyperparameters to pass to the training job.
- input_
data_ Sequence[Algorithmconfigs Validation Specification Validation Profiles Training Job Definition Input Data Config] - Input channel configuration for the validation training job. See Input Data Config.
- output_
data_ Algorithmconfig Validation Specification Validation Profiles Training Job Definition Output Data Config - Output configuration for the validation training job. See Output Data Config.
- resource_
config AlgorithmValidation Specification Validation Profiles Training Job Definition Resource Config - Resource configuration for the validation training job. See Resource Config.
- stopping_
condition AlgorithmValidation Specification Validation Profiles Training Job Definition Stopping Condition - Stopping condition for the validation training job. See Stopping Condition.
- training_
input_ strmode - Input mode for the validation training job. Allowed values are:
Pipe, File, and FastFile. - hyper_
parameters Mapping[str, str] - Hyperparameters to pass to the training job.
- input
Data List<Property Map>Configs - Input channel configuration for the validation training job. See Input Data Config.
- output
Data Property MapConfig - Output configuration for the validation training job. See Output Data Config.
- resource
Config Property Map - Resource configuration for the validation training job. See Resource Config.
- stopping
Condition Property Map - Stopping condition for the validation training job. See Stopping Condition.
- training
Input StringMode - Input mode for the validation training job. Allowed values are:
Pipe, File, and FastFile. - hyper
Parameters Map<String> - Hyperparameters to pass to the training job.
AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionInputDataConfig, AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionInputDataConfigArgs
- Channel
Name string - Name of the channel.
- Data
Source AlgorithmValidation Specification Validation Profiles Training Job Definition Input Data Config Data Source - Source of the input data. See Data Source.
- Compression
Type string - Compression type of the input data. Allowed values are:
None and Gzip. - Content
Type string - MIME type of the input data.
- Input
Mode string - Training input mode for the channel. Allowed values are:
Pipe, File, and FastFile. - Record
Wrapper stringType - Record wrapper type. Allowed values are:
None and RecordIO. - Shuffle
Config AlgorithmValidation Specification Validation Profiles Training Job Definition Input Data Config Shuffle Config - Shuffle configuration for the channel. See Shuffle Config.
- Channel
Name string - Name of the channel.
- Data
Source AlgorithmValidation Specification Validation Profiles Training Job Definition Input Data Config Data Source - Source of the input data. See Data Source.
- Compression
Type string - Compression type of the input data. Allowed values are:
None and Gzip. - Content
Type string - MIME type of the input data.
- Input
Mode string - Training input mode for the channel. Allowed values are:
Pipe, File, and FastFile. - Record
Wrapper stringType - Record wrapper type. Allowed values are:
None and RecordIO. - Shuffle
Config AlgorithmValidation Specification Validation Profiles Training Job Definition Input Data Config Shuffle Config - Shuffle configuration for the channel. See Shuffle Config.
- channel
Name String - Name of the channel.
- data
Source AlgorithmValidation Specification Validation Profiles Training Job Definition Input Data Config Data Source - Source of the input data. See Data Source.
- compression
Type String - Compression type of the input data. Allowed values are:
None and Gzip. - content
Type String - MIME type of the input data.
- input
Mode String - Training input mode for the channel. Allowed values are:
Pipe, File, and FastFile. - record
Wrapper StringType - Record wrapper type. Allowed values are:
None and RecordIO. - shuffle
Config AlgorithmValidation Specification Validation Profiles Training Job Definition Input Data Config Shuffle Config - Shuffle configuration for the channel. See Shuffle Config.
- channel
Name string - Name of the channel.
- data
Source AlgorithmValidation Specification Validation Profiles Training Job Definition Input Data Config Data Source - Source of the input data. See Data Source.
- compression
Type string - Compression type of the input data. Allowed values are:
None and Gzip. - content
Type string - MIME type of the input data.
- input
Mode string - Training input mode for the channel. Allowed values are:
Pipe, File, and FastFile. - record
Wrapper stringType - Record wrapper type. Allowed values are:
None and RecordIO. - shuffle
Config AlgorithmValidation Specification Validation Profiles Training Job Definition Input Data Config Shuffle Config - Shuffle configuration for the channel. See Shuffle Config.
- channel_
name str - Name of the channel.
- data_
source AlgorithmValidation Specification Validation Profiles Training Job Definition Input Data Config Data Source - Source of the input data. See Data Source.
- compression_
type str - Compression type of the input data. Allowed values are:
None and Gzip. - content_
type str - MIME type of the input data.
- input_
mode str - Training input mode for the channel. Allowed values are:
Pipe, File, and FastFile. - record_
wrapper_ strtype - Record wrapper type. Allowed values are:
None and RecordIO. - shuffle_
config AlgorithmValidation Specification Validation Profiles Training Job Definition Input Data Config Shuffle Config - Shuffle configuration for the channel. See Shuffle Config.
- channel
Name String - Name of the channel.
- data
Source Property Map - Source of the input data. See Data Source.
- compression
Type String - Compression type of the input data. Allowed values are:
None and Gzip. - content
Type String - MIME type of the input data.
- input
Mode String - Training input mode for the channel. Allowed values are:
Pipe, File, and FastFile. - record
Wrapper StringType - Record wrapper type. Allowed values are:
None and RecordIO. - shuffle
Config Property Map - Shuffle configuration for the channel. See Shuffle Config.
AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionInputDataConfigDataSource, AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionInputDataConfigDataSourceArgs
- File
System AlgorithmData Source Validation Specification Validation Profiles Training Job Definition Input Data Config Data Source File System Data Source - File system-backed data source. See File System Data Source.
- S3Data
Source AlgorithmValidation Specification Validation Profiles Training Job Definition Input Data Config Data Source S3Data Source - S3-backed training data source. See Training S3 Data Source.
- File
System AlgorithmData Source Validation Specification Validation Profiles Training Job Definition Input Data Config Data Source File System Data Source - File system-backed data source. See File System Data Source.
- S3Data
Source AlgorithmValidation Specification Validation Profiles Training Job Definition Input Data Config Data Source S3Data Source - S3-backed training data source. See Training S3 Data Source.
- file
System AlgorithmData Source Validation Specification Validation Profiles Training Job Definition Input Data Config Data Source File System Data Source - File system-backed data source. See File System Data Source.
- s3Data
Source AlgorithmValidation Specification Validation Profiles Training Job Definition Input Data Config Data Source S3Data Source - S3-backed training data source. See Training S3 Data Source.
- file
System AlgorithmData Source Validation Specification Validation Profiles Training Job Definition Input Data Config Data Source File System Data Source - File system-backed data source. See File System Data Source.
- s3Data
Source AlgorithmValidation Specification Validation Profiles Training Job Definition Input Data Config Data Source S3Data Source - S3-backed training data source. See Training S3 Data Source.
- file_
system_ Algorithmdata_ source Validation Specification Validation Profiles Training Job Definition Input Data Config Data Source File System Data Source - File system-backed data source. See File System Data Source.
- s3_
data_ Algorithmsource Validation Specification Validation Profiles Training Job Definition Input Data Config Data Source S3Data Source - S3-backed training data source. See Training S3 Data Source.
- file
System Property MapData Source - File system-backed data source. See File System Data Source.
- s3Data
Source Property Map - S3-backed training data source. See Training S3 Data Source.
AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionInputDataConfigDataSourceFileSystemDataSource, AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionInputDataConfigDataSourceFileSystemDataSourceArgs
- Directory
Path string - Path to the directory in the mounted file system.
- File
System stringAccess Mode - File system access mode.
- File
System stringId - ID of the file system.
- File
System stringType - File system type.
- Directory
Path string - Path to the directory in the mounted file system.
- File
System stringAccess Mode - File system access mode.
- File
System stringId - ID of the file system.
- File
System stringType - File system type.
- directory
Path String - Path to the directory in the mounted file system.
- file
System StringAccess Mode - File system access mode.
- file
System StringId - ID of the file system.
- file
System StringType - File system type.
- directory
Path string - Path to the directory in the mounted file system.
- file
System stringAccess Mode - File system access mode.
- file
System stringId - ID of the file system.
- file
System stringType - File system type.
- directory_
path str - Path to the directory in the mounted file system.
- file_
system_ straccess_ mode - File system access mode.
- file_
system_ strid - ID of the file system.
- file_
system_ strtype - File system type.
- directory
Path String - Path to the directory in the mounted file system.
- file
System StringAccess Mode - File system access mode.
- file
System StringId - ID of the file system.
- file
System StringType - File system type.
AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionInputDataConfigDataSourceS3DataSource, AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionInputDataConfigDataSourceS3DataSourceArgs
- S3Data
Type string - S3Uri string
- Attribute
Names List<string> - List of JSON attribute names to select from the input data.
- Hub
Access AlgorithmConfig Validation Specification Validation Profiles Training Job Definition Input Data Config Data Source S3Data Source Hub Access Config - Instance
Group List<string>Names - Instance group names associated with the data source.
- Model
Access AlgorithmConfig Validation Specification Validation Profiles Training Job Definition Input Data Config Data Source S3Data Source Model Access Config - S3Data
Distribution stringType - Distribution type for S3 data. Allowed values are:
FullyReplicated and ShardedByS3Key.
- S3Data
Type string - S3Uri string
- Attribute
Names []string - List of JSON attribute names to select from the input data.
- Hub
Access AlgorithmConfig Validation Specification Validation Profiles Training Job Definition Input Data Config Data Source S3Data Source Hub Access Config - Instance
Group []stringNames - Instance group names associated with the data source.
- Model
Access AlgorithmConfig Validation Specification Validation Profiles Training Job Definition Input Data Config Data Source S3Data Source Model Access Config - S3Data
Distribution stringType - Distribution type for S3 data. Allowed values are:
FullyReplicated and ShardedByS3Key.
- s3Data
Type String - s3Uri String
- attribute
Names List<String> - List of JSON attribute names to select from the input data.
- hub
Access AlgorithmConfig Validation Specification Validation Profiles Training Job Definition Input Data Config Data Source S3Data Source Hub Access Config - instance
Group List<String>Names - Instance group names associated with the data source.
- model
Access AlgorithmConfig Validation Specification Validation Profiles Training Job Definition Input Data Config Data Source S3Data Source Model Access Config - s3Data
Distribution StringType - Distribution type for S3 data. Allowed values are:
FullyReplicated and ShardedByS3Key.
- s3Data
Type string - s3Uri string
- attribute
Names string[] - List of JSON attribute names to select from the input data.
- hub
Access AlgorithmConfig Validation Specification Validation Profiles Training Job Definition Input Data Config Data Source S3Data Source Hub Access Config - instance
Group string[]Names - Instance group names associated with the data source.
- model
Access AlgorithmConfig Validation Specification Validation Profiles Training Job Definition Input Data Config Data Source S3Data Source Model Access Config - s3Data
Distribution stringType - Distribution type for S3 data. Allowed values are:
FullyReplicated and ShardedByS3Key.
- s3_
data_ strtype - s3_
uri str - attribute_
names Sequence[str] - List of JSON attribute names to select from the input data.
- hub_
access_ Algorithmconfig Validation Specification Validation Profiles Training Job Definition Input Data Config Data Source S3Data Source Hub Access Config - instance_
group_ Sequence[str]names - Instance group names associated with the data source.
- model_
access_ Algorithmconfig Validation Specification Validation Profiles Training Job Definition Input Data Config Data Source S3Data Source Model Access Config - s3_
data_ strdistribution_ type - Distribution type for S3 data. Allowed values are:
FullyReplicated and ShardedByS3Key.
- s3Data
Type String - s3Uri String
- attribute
Names List<String> - List of JSON attribute names to select from the input data.
- hub
Access Property MapConfig - instance
Group List<String>Names - Instance group names associated with the data source.
- model
Access Property MapConfig - s3Data
Distribution StringType - Distribution type for S3 data. Allowed values are:
FullyReplicated and ShardedByS3Key.
AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionInputDataConfigDataSourceS3DataSourceHubAccessConfig, AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionInputDataConfigDataSourceS3DataSourceHubAccessConfigArgs
- Hub
Content stringArn - ARN of the SageMaker AI Hub content.
- Hub
Content stringArn - ARN of the SageMaker AI Hub content.
- hub
Content StringArn - ARN of the SageMaker AI Hub content.
- hub
Content stringArn - ARN of the SageMaker AI Hub content.
- hub_
content_ strarn - ARN of the SageMaker AI Hub content.
- hub
Content StringArn - ARN of the SageMaker AI Hub content.
AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionInputDataConfigDataSourceS3DataSourceModelAccessConfig, AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionInputDataConfigDataSourceS3DataSourceModelAccessConfigArgs
- Accept
Eula bool - Whether to accept the model end-user license agreement.
- Accept
Eula bool - Whether to accept the model end-user license agreement.
- accept
Eula Boolean - Whether to accept the model end-user license agreement.
- accept
Eula boolean - Whether to accept the model end-user license agreement.
- accept_
eula bool - Whether to accept the model end-user license agreement.
- accept
Eula Boolean - Whether to accept the model end-user license agreement.
AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionInputDataConfigShuffleConfig, AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionInputDataConfigShuffleConfigArgs
- Seed int
- Shuffle seed.
- Seed int
- Shuffle seed.
- seed Integer
- Shuffle seed.
- seed number
- Shuffle seed.
- seed int
- Shuffle seed.
- seed Number
- Shuffle seed.
AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionOutputDataConfig, AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionOutputDataConfigArgs
- S3Output
Path string - S3 or HTTPS URI where output data is stored.
- Compression
Type string - Compression type for the output data. Allowed values are:
None and GZIP. - Kms
Key stringId - KMS key ID used to encrypt output data.
- S3Output
Path string - S3 or HTTPS URI where output data is stored.
- Compression
Type string - Compression type for the output data. Allowed values are:
None and GZIP. - Kms
Key stringId - KMS key ID used to encrypt output data.
- s3Output
Path String - S3 or HTTPS URI where output data is stored.
- compression
Type String - Compression type for the output data. Allowed values are:
None and GZIP. - kms
Key StringId - KMS key ID used to encrypt output data.
- s3Output
Path string - S3 or HTTPS URI where output data is stored.
- compression
Type string - Compression type for the output data. Allowed values are:
None and GZIP. - kms
Key stringId - KMS key ID used to encrypt output data.
- s3_
output_ strpath - S3 or HTTPS URI where output data is stored.
- compression_
type str - Compression type for the output data. Allowed values are:
None and GZIP. - kms_
key_ strid - KMS key ID used to encrypt output data.
- s3Output
Path String - S3 or HTTPS URI where output data is stored.
- compression
Type String - Compression type for the output data. Allowed values are:
None and GZIP. - kms
Key StringId - KMS key ID used to encrypt output data.
AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionResourceConfig, AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionResourceConfigArgs
- Instance
Count int - Number of training instances.
- Instance
Groups List<AlgorithmValidation Specification Validation Profiles Training Job Definition Resource Config Instance Group> - Instance group definitions for the training job. See Instance Groups.
- Instance
Placement AlgorithmConfig Validation Specification Validation Profiles Training Job Definition Resource Config Instance Placement Config - Placement configuration for the training job. See Instance Placement Config.
- Instance
Type string - Training instance type.
- Keep
Alive intPeriod In Seconds - Warm pool keep-alive period in seconds.
- Training
Plan stringArn - ARN of the SageMaker AI training plan.
- Volume
Kms stringKey Id - KMS key ID used to encrypt the training volume.
- Volume
Size intIn Gb - Size of the training volume in GiB.
- Instance
Count int - Number of training instances.
- Instance
Groups []AlgorithmValidation Specification Validation Profiles Training Job Definition Resource Config Instance Group - Instance group definitions for the training job. See Instance Groups.
- Instance
Placement AlgorithmConfig Validation Specification Validation Profiles Training Job Definition Resource Config Instance Placement Config - Placement configuration for the training job. See Instance Placement Config.
- Instance
Type string - Training instance type.
- Keep
Alive intPeriod In Seconds - Warm pool keep-alive period in seconds.
- Training
Plan stringArn - ARN of the SageMaker AI training plan.
- Volume
Kms stringKey Id - KMS key ID used to encrypt the training volume.
- Volume
Size intIn Gb - Size of the training volume in GiB.
- instance
Count Integer - Number of training instances.
- instance
Groups List<AlgorithmValidation Specification Validation Profiles Training Job Definition Resource Config Instance Group> - Instance group definitions for the training job. See Instance Groups.
- instance
Placement AlgorithmConfig Validation Specification Validation Profiles Training Job Definition Resource Config Instance Placement Config - Placement configuration for the training job. See Instance Placement Config.
- instance
Type String - Training instance type.
- keep
Alive IntegerPeriod In Seconds - Warm pool keep-alive period in seconds.
- training
Plan StringArn - ARN of the SageMaker AI training plan.
- volume
Kms StringKey Id - KMS key ID used to encrypt the training volume.
- volume
Size IntegerIn Gb - Size of the training volume in GiB.
- instance
Count number - Number of training instances.
- instance
Groups AlgorithmValidation Specification Validation Profiles Training Job Definition Resource Config Instance Group[] - Instance group definitions for the training job. See Instance Groups.
- instance
Placement AlgorithmConfig Validation Specification Validation Profiles Training Job Definition Resource Config Instance Placement Config - Placement configuration for the training job. See Instance Placement Config.
- instance
Type string - Training instance type.
- keep
Alive numberPeriod In Seconds - Warm pool keep-alive period in seconds.
- training
Plan stringArn - ARN of the SageMaker AI training plan.
- volume
Kms stringKey Id - KMS key ID used to encrypt the training volume.
- volume
Size numberIn Gb - Size of the training volume in GiB.
- instance_
count int - Number of training instances.
- instance_
groups Sequence[AlgorithmValidation Specification Validation Profiles Training Job Definition Resource Config Instance Group] - Instance group definitions for the training job. See Instance Groups.
- instance_
placement_ Algorithmconfig Validation Specification Validation Profiles Training Job Definition Resource Config Instance Placement Config - Placement configuration for the training job. See Instance Placement Config.
- instance_
type str - Training instance type.
- keep_
alive_ intperiod_ in_ seconds - Warm pool keep-alive period in seconds.
- training_
plan_ strarn - ARN of the SageMaker AI training plan.
- volume_
kms_ strkey_ id - KMS key ID used to encrypt the training volume.
- volume_
size_ intin_ gb - Size of the training volume in GiB.
- instance
Count Number - Number of training instances.
- instance
Groups List<Property Map> - Instance group definitions for the training job. See Instance Groups.
- instance
Placement Property MapConfig - Placement configuration for the training job. See Instance Placement Config.
- instance
Type String - Training instance type.
- keep
Alive NumberPeriod In Seconds - Warm pool keep-alive period in seconds.
- training
Plan StringArn - ARN of the SageMaker AI training plan.
- volume
Kms StringKey Id - KMS key ID used to encrypt the training volume.
- volume
Size NumberIn Gb - Size of the training volume in GiB.
AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionResourceConfigInstanceGroup, AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionResourceConfigInstanceGroupArgs
- Instance
Count int - Number of instances in the group.
- Instance
Group stringName - Name of the instance group.
- Instance
Type string - Instance type for the group.
- Instance
Count int - Number of instances in the group.
- Instance
Group stringName - Name of the instance group.
- Instance
Type string - Instance type for the group.
- instance
Count Integer - Number of instances in the group.
- instance
Group StringName - Name of the instance group.
- instance
Type String - Instance type for the group.
- instance
Count number - Number of instances in the group.
- instance
Group stringName - Name of the instance group.
- instance
Type string - Instance type for the group.
- instance_
count int - Number of instances in the group.
- instance_
group_ strname - Name of the instance group.
- instance_
type str - Instance type for the group.
- instance
Count Number - Number of instances in the group.
- instance
Group StringName - Name of the instance group.
- instance
Type String - Instance type for the group.
AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionResourceConfigInstancePlacementConfig, AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionResourceConfigInstancePlacementConfigArgs
- Enable
Multiple boolJobs - Whether multiple jobs can share the placement configuration.
- Placement
Specifications List<AlgorithmValidation Specification Validation Profiles Training Job Definition Resource Config Instance Placement Config Placement Specification> - Placement specifications for ultra servers. See Placement Specifications.
- Enable
Multiple boolJobs - Whether multiple jobs can share the placement configuration.
- Placement
Specifications []AlgorithmValidation Specification Validation Profiles Training Job Definition Resource Config Instance Placement Config Placement Specification - Placement specifications for ultra servers. See Placement Specifications.
- enable
Multiple BooleanJobs - Whether multiple jobs can share the placement configuration.
- placement
Specifications List<AlgorithmValidation Specification Validation Profiles Training Job Definition Resource Config Instance Placement Config Placement Specification> - Placement specifications for ultra servers. See Placement Specifications.
- enable
Multiple booleanJobs - Whether multiple jobs can share the placement configuration.
- placement
Specifications AlgorithmValidation Specification Validation Profiles Training Job Definition Resource Config Instance Placement Config Placement Specification[] - Placement specifications for ultra servers. See Placement Specifications.
- enable_
multiple_ booljobs - Whether multiple jobs can share the placement configuration.
- placement_
specifications Sequence[AlgorithmValidation Specification Validation Profiles Training Job Definition Resource Config Instance Placement Config Placement Specification] - Placement specifications for ultra servers. See Placement Specifications.
- enable
Multiple BooleanJobs - Whether multiple jobs can share the placement configuration.
- placement
Specifications List<Property Map> - Placement specifications for ultra servers. See Placement Specifications.
AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionResourceConfigInstancePlacementConfigPlacementSpecification, AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionResourceConfigInstancePlacementConfigPlacementSpecificationArgs
- Instance
Count int - Number of instances for the placement specification.
- Ultra
Server stringId - Ultra server ID.
- Instance
Count int - Number of instances for the placement specification.
- Ultra
Server stringId - Ultra server ID.
- instance
Count Integer - Number of instances for the placement specification.
- ultra
Server StringId - Ultra server ID.
- instance
Count number - Number of instances for the placement specification.
- ultra
Server stringId - Ultra server ID.
- instance_
count int - Number of instances for the placement specification.
- ultra_
server_ strid - Ultra server ID.
- instance
Count Number - Number of instances for the placement specification.
- ultra
Server StringId - Ultra server ID.
AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionStoppingCondition, AlgorithmValidationSpecificationValidationProfilesTrainingJobDefinitionStoppingConditionArgs
- Max
Pending intTime In Seconds - Maximum time, in seconds, a job can remain pending.
- Max
Runtime intIn Seconds - Maximum runtime, in seconds, for the training job.
- Max
Wait intTime In Seconds - Maximum wait time, in seconds, including spot interruptions.
- Max
Pending intTime In Seconds - Maximum time, in seconds, a job can remain pending.
- Max
Runtime intIn Seconds - Maximum runtime, in seconds, for the training job.
- Max
Wait intTime In Seconds - Maximum wait time, in seconds, including spot interruptions.
- max
Pending IntegerTime In Seconds - Maximum time, in seconds, a job can remain pending.
- max
Runtime IntegerIn Seconds - Maximum runtime, in seconds, for the training job.
- max
Wait IntegerTime In Seconds - Maximum wait time, in seconds, including spot interruptions.
- max
Pending numberTime In Seconds - Maximum time, in seconds, a job can remain pending.
- max
Runtime numberIn Seconds - Maximum runtime, in seconds, for the training job.
- max
Wait numberTime In Seconds - Maximum wait time, in seconds, including spot interruptions.
- max_
pending_ inttime_ in_ seconds - Maximum time, in seconds, a job can remain pending.
- max_
runtime_ intin_ seconds - Maximum runtime, in seconds, for the training job.
- max_
wait_ inttime_ in_ seconds - Maximum wait time, in seconds, including spot interruptions.
- max
Pending NumberTime In Seconds - Maximum time, in seconds, a job can remain pending.
- max
Runtime NumberIn Seconds - Maximum runtime, in seconds, for the training job.
- max
Wait NumberTime In Seconds - Maximum wait time, in seconds, including spot interruptions.
AlgorithmValidationSpecificationValidationProfilesTransformJobDefinition, AlgorithmValidationSpecificationValidationProfilesTransformJobDefinitionArgs
- Transform
Input AlgorithmValidation Specification Validation Profiles Transform Job Definition Transform Input - Input configuration for the transform job. See Transform Input.
- Transform
Output AlgorithmValidation Specification Validation Profiles Transform Job Definition Transform Output - Output configuration for the transform job. See Transform Output.
- Transform
Resources AlgorithmValidation Specification Validation Profiles Transform Job Definition Transform Resources - Compute resources for the transform job. See Transform Resources.
- Batch
Strategy string - Batch strategy for the transform job. Allowed values are:
MultiRecord and SingleRecord. - Environment Dictionary<string, string>
- Environment variables to pass to the transform container.
- Max
Concurrent intTransforms - Maximum number of parallel transform requests.
- Max
Payload intIn Mb - Maximum payload size, in MiB, for transform requests.
- Transform
Input AlgorithmValidation Specification Validation Profiles Transform Job Definition Transform Input - Input configuration for the transform job. See Transform Input.
- Transform
Output AlgorithmValidation Specification Validation Profiles Transform Job Definition Transform Output - Output configuration for the transform job. See Transform Output.
- Transform
Resources AlgorithmValidation Specification Validation Profiles Transform Job Definition Transform Resources - Compute resources for the transform job. See Transform Resources.
- Batch
Strategy string - Batch strategy for the transform job. Allowed values are:
MultiRecord and SingleRecord. - Environment map[string]string
- Environment variables to pass to the transform container.
- Max
Concurrent intTransforms - Maximum number of parallel transform requests.
- Max
Payload intIn Mb - Maximum payload size, in MiB, for transform requests.
- transform
Input AlgorithmValidation Specification Validation Profiles Transform Job Definition Transform Input - Input configuration for the transform job. See Transform Input.
- transform
Output AlgorithmValidation Specification Validation Profiles Transform Job Definition Transform Output - Output configuration for the transform job. See Transform Output.
- transform
Resources AlgorithmValidation Specification Validation Profiles Transform Job Definition Transform Resources - Compute resources for the transform job. See Transform Resources.
- batch
Strategy String - Batch strategy for the transform job. Allowed values are:
MultiRecord and SingleRecord. - environment Map<String,String>
- Environment variables to pass to the transform container.
- max
Concurrent IntegerTransforms - Maximum number of parallel transform requests.
- max
Payload IntegerIn Mb - Maximum payload size, in MiB, for transform requests.
- transform
Input AlgorithmValidation Specification Validation Profiles Transform Job Definition Transform Input - Input configuration for the transform job. See Transform Input.
- transform
Output AlgorithmValidation Specification Validation Profiles Transform Job Definition Transform Output - Output configuration for the transform job. See Transform Output.
- transform
Resources AlgorithmValidation Specification Validation Profiles Transform Job Definition Transform Resources - Compute resources for the transform job. See Transform Resources.
- batch
Strategy string - Batch strategy for the transform job. Allowed values are:
MultiRecord and SingleRecord. - environment {[key: string]: string}
- Environment variables to pass to the transform container.
- max
Concurrent Transforms number - Maximum number of parallel transform requests.
- max
Payload In Mb number - Maximum payload size, in MiB, for transform requests.
- transform_
input AlgorithmValidation Specification Validation Profiles Transform Job Definition Transform Input - Input configuration for the transform job. See Transform Input.
- transform_
output AlgorithmValidation Specification Validation Profiles Transform Job Definition Transform Output - Output configuration for the transform job. See Transform Output.
- transform_
resources AlgorithmValidation Specification Validation Profiles Transform Job Definition Transform Resources - Compute resources for the transform job. See Transform Resources.
- batch_
strategy str - Batch strategy for the transform job. Allowed values are:
MultiRecord and SingleRecord. - environment Mapping[str, str]
- Environment variables to pass to the transform container.
- max_
concurrent_ transforms int - Maximum number of parallel transform requests.
- max_
payload_ in_ mb int - Maximum payload size, in MiB, for transform requests.
- transform
Input Property Map - Input configuration for the transform job. See Transform Input.
- transform
Output Property Map - Output configuration for the transform job. See Transform Output.
- transform
Resources Property Map - Compute resources for the transform job. See Transform Resources.
- batch
Strategy String - Batch strategy for the transform job. Allowed values are:
MultiRecord and SingleRecord. - environment Map<String>
- Environment variables to pass to the transform container.
- max
Concurrent Transforms Number - Maximum number of parallel transform requests.
- max
Payload In Mb Number - Maximum payload size, in MiB, for transform requests.
AlgorithmValidationSpecificationValidationProfilesTransformJobDefinitionTransformInput, AlgorithmValidationSpecificationValidationProfilesTransformJobDefinitionTransformInputArgs
- Data
Source AlgorithmValidation Specification Validation Profiles Transform Job Definition Transform Input Data Source - Data source for the transform job. See Transform Job Data Source.
- Compression
Type string - Compression type of the input data. Allowed values are:
None and Gzip. - Content
Type string - MIME type of the input data.
- Split
Type string - Method used to split the transform input. Allowed values are:
None, Line, RecordIO, and TFRecord.
- Data
Source AlgorithmValidation Specification Validation Profiles Transform Job Definition Transform Input Data Source - Data source for the transform job. See Transform Job Data Source.
- Compression
Type string - Compression type of the input data. Allowed values are:
None and Gzip. - Content
Type string - MIME type of the input data.
- Split
Type string - Method used to split the transform input. Allowed values are:
None, Line, RecordIO, and TFRecord.
- data
Source AlgorithmValidation Specification Validation Profiles Transform Job Definition Transform Input Data Source - Data source for the transform job. See Transform Job Data Source.
- compression
Type String - Compression type of the input data. Allowed values are:
None and Gzip. - content
Type String - MIME type of the input data.
- split
Type String - Method used to split the transform input. Allowed values are:
None, Line, RecordIO, and TFRecord.
- data
Source AlgorithmValidation Specification Validation Profiles Transform Job Definition Transform Input Data Source - Data source for the transform job. See Transform Job Data Source.
- compression
Type string - Compression type of the input data. Allowed values are:
None and Gzip. - content
Type string - MIME type of the input data.
- split
Type string - Method used to split the transform input. Allowed values are:
None, Line, RecordIO, and TFRecord.
- data_
source AlgorithmValidation Specification Validation Profiles Transform Job Definition Transform Input Data Source - Data source for the transform job. See Transform Job Data Source.
- compression_
type str - Compression type of the input data. Allowed values are:
None and Gzip. - content_
type str - MIME type of the input data.
- split_
type str - Method used to split the transform input. Allowed values are:
None, Line, RecordIO, and TFRecord.
- data
Source Property Map - Data source for the transform job. See Transform Job Data Source.
- compression
Type String - Compression type of the input data. Allowed values are:
None and Gzip. - content
Type String - MIME type of the input data.
- split
Type String - Method used to split the transform input. Allowed values are:
None, Line, RecordIO, and TFRecord.
AlgorithmValidationSpecificationValidationProfilesTransformJobDefinitionTransformInputDataSource, AlgorithmValidationSpecificationValidationProfilesTransformJobDefinitionTransformInputDataSourceArgs
- S3Data
Source AlgorithmValidation Specification Validation Profiles Transform Job Definition Transform Input Data Source S3Data Source - S3-backed training data source. See Training S3 Data Source.
- S3Data
Source AlgorithmValidation Specification Validation Profiles Transform Job Definition Transform Input Data Source S3Data Source - S3-backed training data source. See Training S3 Data Source.
- s3Data
Source AlgorithmValidation Specification Validation Profiles Transform Job Definition Transform Input Data Source S3Data Source - S3-backed training data source. See Training S3 Data Source.
- s3Data
Source AlgorithmValidation Specification Validation Profiles Transform Job Definition Transform Input Data Source S3Data Source - S3-backed training data source. See Training S3 Data Source.
- s3_
data_ source AlgorithmValidation Specification Validation Profiles Transform Job Definition Transform Input Data Source S3Data Source - S3-backed training data source. See Training S3 Data Source.
- s3Data
Source Property Map - S3-backed training data source. See Training S3 Data Source.
AlgorithmValidationSpecificationValidationProfilesTransformJobDefinitionTransformInputDataSourceS3DataSource, AlgorithmValidationSpecificationValidationProfilesTransformJobDefinitionTransformInputDataSourceS3DataSourceArgs
- S3Data
Type string - S3Uri string
- S3Data
Type string - S3Uri string
- s3Data
Type String - s3Uri String
- s3Data
Type string - s3Uri string
- s3_
data_ type str - s3_
uri str
- s3Data
Type String - s3Uri String
AlgorithmValidationSpecificationValidationProfilesTransformJobDefinitionTransformOutput, AlgorithmValidationSpecificationValidationProfilesTransformJobDefinitionTransformOutputArgs
- S3Output
Path string - S3 or HTTPS URI where transform output is stored.
- Accept string
- MIME type of the transform output.
- Assemble
With string - Method used to assemble the transform output. Allowed values are:
None and Line. - Kms
Key Id string - KMS key ID used to encrypt transform output.
- S3Output
Path string - S3 or HTTPS URI where transform output is stored.
- Accept string
- MIME type of the transform output.
- Assemble
With string - Method used to assemble the transform output. Allowed values are:
None and Line. - Kms
Key Id string - KMS key ID used to encrypt transform output.
- s3Output
Path String - S3 or HTTPS URI where transform output is stored.
- accept String
- MIME type of the transform output.
- assemble
With String - Method used to assemble the transform output. Allowed values are:
None and Line. - kms
Key Id String - KMS key ID used to encrypt transform output.
- s3Output
Path string - S3 or HTTPS URI where transform output is stored.
- accept string
- MIME type of the transform output.
- assemble
With string - Method used to assemble the transform output. Allowed values are:
None and Line. - kms
Key Id string - KMS key ID used to encrypt transform output.
- s3_
output_ path str - S3 or HTTPS URI where transform output is stored.
- accept str
- MIME type of the transform output.
- assemble_
with str - Method used to assemble the transform output. Allowed values are:
None and Line. - kms_
key_ id str - KMS key ID used to encrypt transform output.
- s3Output
Path String - S3 or HTTPS URI where transform output is stored.
- accept String
- MIME type of the transform output.
- assemble
With String - Method used to assemble the transform output. Allowed values are:
None and Line. - kms
Key Id String - KMS key ID used to encrypt transform output.
AlgorithmValidationSpecificationValidationProfilesTransformJobDefinitionTransformResources, AlgorithmValidationSpecificationValidationProfilesTransformJobDefinitionTransformResourcesArgs
- Instance
Count int - Number of transform instances.
- Instance
Type string - Transform instance type.
- Transform
Ami Version string - Transform AMI version.
- Volume
Kms Key Id string - KMS key ID used to encrypt the transform volume.
- Instance
Count int - Number of transform instances.
- Instance
Type string - Transform instance type.
- Transform
Ami Version string - Transform AMI version.
- Volume
Kms Key Id string - KMS key ID used to encrypt the transform volume.
- instance
Count Integer - Number of transform instances.
- instance
Type String - Transform instance type.
- transform
Ami Version String - Transform AMI version.
- volume
Kms Key Id String - KMS key ID used to encrypt the transform volume.
- instance
Count number - Number of transform instances.
- instance
Type string - Transform instance type.
- transform
Ami Version string - Transform AMI version.
- volume
Kms Key Id string - KMS key ID used to encrypt the transform volume.
- instance_
count int - Number of transform instances.
- instance_
type str - Transform instance type.
- transform_
ami_ version str - Transform AMI version.
- volume_
kms_ key_ id str - KMS key ID used to encrypt the transform volume.
- instance
Count Number - Number of transform instances.
- instance
Type String - Transform instance type.
- transform
Ami Version String - Transform AMI version.
- volume
Kms Key Id String - KMS key ID used to encrypt the transform volume.
Import
Identity Schema
Required
algorithmName - (String) Name of the algorithm.
Optional
accountId - (String) AWS account where this resource is managed. region - (String) Region where this resource is managed.
Using pulumi import, import SageMaker AI Algorithms using algorithmName. For example:
$ pulumi import aws:sagemaker/algorithm:Algorithm example example-algorithm
To learn more about importing existing cloud resources, see Importing resources.
Package Details
- Repository
- AWS Classic pulumi/pulumi-aws
- License
- Apache-2.0
- Notes
- This Pulumi package is based on the
aws Terraform Provider.
published on Tuesday, Mar 31, 2026 by Pulumi
