1. Packages
  2. Hsdp Provider
  3. API Docs
  4. AiInferenceJob
hsdp 0.65.3 published on Tuesday, Apr 15, 2025 by philips-software

hsdp.AiInferenceJob

Explore with Pulumi AI

hsdp logo
hsdp 0.65.3 published on Tuesday, Apr 15, 2025 by philips-software

    Manages HSDP AI Inference Jobs

    Example Usage

    import * as pulumi from "@pulumi/pulumi";
    import * as hsdp from "@pulumi/hsdp";
    
    const job = new hsdp.AiInferenceJob("job", {
        endpoint: data.hsdp_ai_inference_service_instance.inference.endpoint,
        description: "Long running Inference Job",
        timeout: 60,
        model: {
            reference: hsdp_ai_inference_model.model.reference,
        },
        computeTarget: {
            reference: hsdp_ai_inference_compute_target.target.reference,
        },
        inputs: [{
            name: "train",
            url: "s3://input-sagemaker-64q6eey/data/input",
        }],
        outputs: [{
            name: "train",
            url: "s3://input-sagemaker-64q6eey/data/prediction",
        }],
        environment: {
            FOO: "bar",
            BAR: "baz",
        },
        commandArgs: [
            "-f",
            "abc",
        ],
        labels: [
            "BONEAGE",
            "CNN",
        ],
    });
    
    import pulumi
    import pulumi_hsdp as hsdp
    
    job = hsdp.AiInferenceJob("job",
        endpoint=data["hsdp_ai_inference_service_instance"]["inference"]["endpoint"],
        description="Long running Inference Job",
        timeout=60,
        model={
            "reference": hsdp_ai_inference_model["model"]["reference"],
        },
        compute_target={
            "reference": hsdp_ai_inference_compute_target["target"]["reference"],
        },
        inputs=[{
            "name": "train",
            "url": "s3://input-sagemaker-64q6eey/data/input",
        }],
        outputs=[{
            "name": "train",
            "url": "s3://input-sagemaker-64q6eey/data/prediction",
        }],
        environment={
            "FOO": "bar",
            "BAR": "baz",
        },
        command_args=[
            "-f",
            "abc",
        ],
        labels=[
            "BONEAGE",
            "CNN",
        ])
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-terraform-provider/sdks/go/hsdp/hsdp"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		_, err := hsdp.NewAiInferenceJob(ctx, "job", &hsdp.AiInferenceJobArgs{
    			Endpoint:    pulumi.Any(data.Hsdp_ai_inference_service_instance.Inference.Endpoint),
    			Description: pulumi.String("Long running Inference Job"),
    			Timeout:     pulumi.Float64(60),
    			Model: &hsdp.AiInferenceJobModelArgs{
    				Reference: pulumi.Any(hsdp_ai_inference_model.Model.Reference),
    			},
    			ComputeTarget: &hsdp.AiInferenceJobComputeTargetArgs{
    				Reference: pulumi.Any(hsdp_ai_inference_compute_target.Target.Reference),
    			},
    			Inputs: hsdp.AiInferenceJobInputTypeArray{
    				&hsdp.AiInferenceJobInputTypeArgs{
    					Name: pulumi.String("train"),
    					Url:  pulumi.String("s3://input-sagemaker-64q6eey/data/input"),
    				},
    			},
    			Outputs: hsdp.AiInferenceJobOutputTypeArray{
    				&hsdp.AiInferenceJobOutputTypeArgs{
    					Name: pulumi.String("train"),
    					Url:  pulumi.String("s3://input-sagemaker-64q6eey/data/prediction"),
    				},
    			},
    			Environment: pulumi.StringMap{
    				"FOO": pulumi.String("bar"),
    				"BAR": pulumi.String("baz"),
    			},
    			CommandArgs: pulumi.StringArray{
    				pulumi.String("-f"),
    				pulumi.String("abc"),
    			},
    			Labels: pulumi.StringArray{
    				pulumi.String("BONEAGE"),
    				pulumi.String("CNN"),
    			},
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Hsdp = Pulumi.Hsdp;
    
    return await Deployment.RunAsync(() => 
    {
        var job = new Hsdp.AiInferenceJob("job", new()
        {
            Endpoint = data.Hsdp_ai_inference_service_instance.Inference.Endpoint,
            Description = "Long running Inference Job",
            Timeout = 60,
            Model = new Hsdp.Inputs.AiInferenceJobModelArgs
            {
                Reference = hsdp_ai_inference_model.Model.Reference,
            },
            ComputeTarget = new Hsdp.Inputs.AiInferenceJobComputeTargetArgs
            {
                Reference = hsdp_ai_inference_compute_target.Target.Reference,
            },
            Inputs = new[]
            {
                new Hsdp.Inputs.AiInferenceJobInputArgs
                {
                    Name = "train",
                    Url = "s3://input-sagemaker-64q6eey/data/input",
                },
            },
            Outputs = new[]
            {
                new Hsdp.Inputs.AiInferenceJobOutputArgs
                {
                    Name = "train",
                    Url = "s3://input-sagemaker-64q6eey/data/prediction",
                },
            },
            Environment = 
            {
                { "FOO", "bar" },
                { "BAR", "baz" },
            },
            CommandArgs = new[]
            {
                "-f",
                "abc",
            },
            Labels = new[]
            {
                "BONEAGE",
                "CNN",
            },
        });
    
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.hsdp.AiInferenceJob;
    import com.pulumi.hsdp.AiInferenceJobArgs;
    import com.pulumi.hsdp.inputs.AiInferenceJobModelArgs;
    import com.pulumi.hsdp.inputs.AiInferenceJobComputeTargetArgs;
    import com.pulumi.hsdp.inputs.AiInferenceJobInputArgs;
    import com.pulumi.hsdp.inputs.AiInferenceJobOutputArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var job = new AiInferenceJob("job", AiInferenceJobArgs.builder()
                .endpoint(data.hsdp_ai_inference_service_instance().inference().endpoint())
                .description("Long running Inference Job")
                .timeout(60)
                .model(AiInferenceJobModelArgs.builder()
                    .reference(hsdp_ai_inference_model.model().reference())
                    .build())
                .computeTarget(AiInferenceJobComputeTargetArgs.builder()
                    .reference(hsdp_ai_inference_compute_target.target().reference())
                    .build())
                .inputs(AiInferenceJobInputArgs.builder()
                    .name("train")
                    .url("s3://input-sagemaker-64q6eey/data/input")
                    .build())
                .outputs(AiInferenceJobOutputArgs.builder()
                    .name("train")
                    .url("s3://input-sagemaker-64q6eey/data/prediction")
                    .build())
                .environment(Map.ofEntries(
                    Map.entry("FOO", "bar"),
                    Map.entry("BAR", "baz")
                ))
                .commandArgs(            
                    "-f",
                    "abc")
                .labels(            
                    "BONEAGE",
                    "CNN")
                .build());
    
        }
    }
    
    resources:
      job:
        type: hsdp:AiInferenceJob
        properties:
          endpoint: ${data.hsdp_ai_inference_service_instance.inference.endpoint}
          description: Long running Inference Job
          timeout: 60
          model:
            reference: ${hsdp_ai_inference_model.model.reference}
          computeTarget:
            reference: ${hsdp_ai_inference_compute_target.target.reference}
          inputs:
            - name: train
              url: s3://input-sagemaker-64q6eey/data/input
          outputs:
            - name: train
              url: s3://input-sagemaker-64q6eey/data/prediction
          environment:
            FOO: bar
            BAR: baz
          commandArgs:
            - -f
            - abc
          labels:
            - BONEAGE
            - CNN
    

    Attributes reference

    In addition to all arguments above, the following attributes are exported:

    • id - The GUID of the job
    • reference - The reference of this job
    • created - The date this job was created
    • created_by - Who created the job
    • completed - When the job was completed
    • duration - How long (seconds) the job ran for
    • status - The status of the job
    • status_message - The status message, if available

    Create AiInferenceJob Resource

    Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.

    Constructor syntax

    new AiInferenceJob(name: string, args: AiInferenceJobArgs, opts?: CustomResourceOptions);
    @overload
    def AiInferenceJob(resource_name: str,
                       args: AiInferenceJobArgs,
                       opts: Optional[ResourceOptions] = None)
    
    @overload
    def AiInferenceJob(resource_name: str,
                       opts: Optional[ResourceOptions] = None,
                       endpoint: Optional[str] = None,
                       ai_inference_job_id: Optional[str] = None,
                       command_args: Optional[Sequence[str]] = None,
                       compute_target: Optional[AiInferenceJobComputeTargetArgs] = None,
                       description: Optional[str] = None,
                       environment: Optional[Mapping[str, str]] = None,
                       inputs: Optional[Sequence[AiInferenceJobInputArgs]] = None,
                       labels: Optional[Sequence[str]] = None,
                       model: Optional[AiInferenceJobModelArgs] = None,
                       name: Optional[str] = None,
                       outputs: Optional[Sequence[AiInferenceJobOutputArgs]] = None,
                       timeout: Optional[float] = None)
    func NewAiInferenceJob(ctx *Context, name string, args AiInferenceJobArgs, opts ...ResourceOption) (*AiInferenceJob, error)
    public AiInferenceJob(string name, AiInferenceJobArgs args, CustomResourceOptions? opts = null)
    public AiInferenceJob(String name, AiInferenceJobArgs args)
    public AiInferenceJob(String name, AiInferenceJobArgs args, CustomResourceOptions options)
    
    type: hsdp:AiInferenceJob
    properties: # The arguments to resource properties.
    options: # Bag of options to control resource's behavior.
    
    

    Parameters

    name string
    The unique name of the resource.
    args AiInferenceJobArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    resource_name str
    The unique name of the resource.
    args AiInferenceJobArgs
    The arguments to resource properties.
    opts ResourceOptions
    Bag of options to control resource's behavior.
    ctx Context
    Context object for the current deployment.
    name string
    The unique name of the resource.
    args AiInferenceJobArgs
    The arguments to resource properties.
    opts ResourceOption
    Bag of options to control resource's behavior.
    name string
    The unique name of the resource.
    args AiInferenceJobArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    name String
    The unique name of the resource.
    args AiInferenceJobArgs
    The arguments to resource properties.
    options CustomResourceOptions
    Bag of options to control resource's behavior.

    Constructor example

    The following reference example uses placeholder values for all input properties.

    var aiInferenceJobResource = new Hsdp.AiInferenceJob("aiInferenceJobResource", new()
    {
        Endpoint = "string",
        AiInferenceJobId = "string",
        CommandArgs = new[]
        {
            "string",
        },
        ComputeTarget = new Hsdp.Inputs.AiInferenceJobComputeTargetArgs
        {
            Identifier = "string",
            Reference = "string",
        },
        Description = "string",
        Environment = 
        {
            { "string", "string" },
        },
        Inputs = new[]
        {
            new Hsdp.Inputs.AiInferenceJobInputArgs
            {
                Url = "string",
                Name = "string",
            },
        },
        Labels = new[]
        {
            "string",
        },
        Model = new Hsdp.Inputs.AiInferenceJobModelArgs
        {
            Identifier = "string",
            Reference = "string",
        },
        Name = "string",
        Outputs = new[]
        {
            new Hsdp.Inputs.AiInferenceJobOutputArgs
            {
                Url = "string",
                Name = "string",
            },
        },
        Timeout = 0,
    });
    
    example, err := hsdp.NewAiInferenceJob(ctx, "aiInferenceJobResource", &hsdp.AiInferenceJobArgs{
    	Endpoint:         pulumi.String("string"),
    	AiInferenceJobId: pulumi.String("string"),
    	CommandArgs: pulumi.StringArray{
    		pulumi.String("string"),
    	},
    	ComputeTarget: &hsdp.AiInferenceJobComputeTargetArgs{
    		Identifier: pulumi.String("string"),
    		Reference:  pulumi.String("string"),
    	},
    	Description: pulumi.String("string"),
    	Environment: pulumi.StringMap{
    		"string": pulumi.String("string"),
    	},
    	Inputs: hsdp.AiInferenceJobInputTypeArray{
    		&hsdp.AiInferenceJobInputTypeArgs{
    			Url:  pulumi.String("string"),
    			Name: pulumi.String("string"),
    		},
    	},
    	Labels: pulumi.StringArray{
    		pulumi.String("string"),
    	},
    	Model: &hsdp.AiInferenceJobModelArgs{
    		Identifier: pulumi.String("string"),
    		Reference:  pulumi.String("string"),
    	},
    	Name: pulumi.String("string"),
    	Outputs: hsdp.AiInferenceJobOutputTypeArray{
    		&hsdp.AiInferenceJobOutputTypeArgs{
    			Url:  pulumi.String("string"),
    			Name: pulumi.String("string"),
    		},
    	},
    	Timeout: pulumi.Float64(0),
    })
    
    var aiInferenceJobResource = new AiInferenceJob("aiInferenceJobResource", AiInferenceJobArgs.builder()
        .endpoint("string")
        .aiInferenceJobId("string")
        .commandArgs("string")
        .computeTarget(AiInferenceJobComputeTargetArgs.builder()
            .identifier("string")
            .reference("string")
            .build())
        .description("string")
        .environment(Map.of("string", "string"))
        .inputs(AiInferenceJobInputArgs.builder()
            .url("string")
            .name("string")
            .build())
        .labels("string")
        .model(AiInferenceJobModelArgs.builder()
            .identifier("string")
            .reference("string")
            .build())
        .name("string")
        .outputs(AiInferenceJobOutputArgs.builder()
            .url("string")
            .name("string")
            .build())
        .timeout(0)
        .build());
    
    ai_inference_job_resource = hsdp.AiInferenceJob("aiInferenceJobResource",
        endpoint="string",
        ai_inference_job_id="string",
        command_args=["string"],
        compute_target={
            "identifier": "string",
            "reference": "string",
        },
        description="string",
        environment={
            "string": "string",
        },
        inputs=[{
            "url": "string",
            "name": "string",
        }],
        labels=["string"],
        model={
            "identifier": "string",
            "reference": "string",
        },
        name="string",
        outputs=[{
            "url": "string",
            "name": "string",
        }],
        timeout=0)
    
    const aiInferenceJobResource = new hsdp.AiInferenceJob("aiInferenceJobResource", {
        endpoint: "string",
        aiInferenceJobId: "string",
        commandArgs: ["string"],
        computeTarget: {
            identifier: "string",
            reference: "string",
        },
        description: "string",
        environment: {
            string: "string",
        },
        inputs: [{
            url: "string",
            name: "string",
        }],
        labels: ["string"],
        model: {
            identifier: "string",
            reference: "string",
        },
        name: "string",
        outputs: [{
            url: "string",
            name: "string",
        }],
        timeout: 0,
    });
    
    type: hsdp:AiInferenceJob
    properties:
        aiInferenceJobId: string
        commandArgs:
            - string
        computeTarget:
            identifier: string
            reference: string
        description: string
        endpoint: string
        environment:
            string: string
        inputs:
            - name: string
              url: string
        labels:
            - string
        model:
            identifier: string
            reference: string
        name: string
        outputs:
            - name: string
              url: string
        timeout: 0
    

    AiInferenceJob Resource Properties

    To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

    Inputs

    In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.

    The AiInferenceJob resource accepts the following input properties:

    Endpoint string
    The AI Inference instance endpoint
    AiInferenceJobId string
    CommandArgs List<string>
    Arguments to use for job
    ComputeTarget AiInferenceJobComputeTarget
    The compute Target to use
    Description string
    Description of the Compute Target
    Environment Dictionary<string, string>
    Environment to set for Job
    Inputs List<AiInferenceJobInput>
    Input data. Can have multiple
    Labels List<string>
    Model AiInferenceJobModel
    The model to use
    Name string
    The name of Compute Environment
    Outputs List<AiInferenceJobOutput>
    Output data. Can have multiple
    Timeout double
    How long the job should run max.
    Endpoint string
    The AI Inference instance endpoint
    AiInferenceJobId string
    CommandArgs []string
    Arguments to use for job
    ComputeTarget AiInferenceJobComputeTargetArgs
    The compute Target to use
    Description string
    Description of the Compute Target
    Environment map[string]string
    Environment to set for Job
    Inputs []AiInferenceJobInputTypeArgs
    Input data. Can have multiple
    Labels []string
    Model AiInferenceJobModelArgs
    The model to use
    Name string
    The name of Compute Environment
    Outputs []AiInferenceJobOutputTypeArgs
    Output data. Can have multiple
    Timeout float64
    How long the job should run max.
    endpoint String
    The AI Inference instance endpoint
    aiInferenceJobId String
    commandArgs List<String>
    Arguments to use for job
    computeTarget AiInferenceJobComputeTarget
    The compute Target to use
    description String
    Description of the Compute Target
    environment Map<String,String>
    Environment to set for Job
    inputs List<AiInferenceJobInput>
    Input data. Can have multiple
    labels List<String>
    model AiInferenceJobModel
    The model to use
    name String
    The name of Compute Environment
    outputs List<AiInferenceJobOutput>
    Output data. Can have multiple
    timeout Double
    How long the job should run max.
    endpoint string
    The AI Inference instance endpoint
    aiInferenceJobId string
    commandArgs string[]
    Arguments to use for job
    computeTarget AiInferenceJobComputeTarget
    The compute Target to use
    description string
    Description of the Compute Target
    environment {[key: string]: string}
    Environment to set for Job
    inputs AiInferenceJobInput[]
    Input data. Can have multiple
    labels string[]
    model AiInferenceJobModel
    The model to use
    name string
    The name of Compute Environment
    outputs AiInferenceJobOutput[]
    Output data. Can have multiple
    timeout number
    How long the job should run max.
    endpoint str
    The AI Inference instance endpoint
    ai_inference_job_id str
    command_args Sequence[str]
    Arguments to use for job
    compute_target AiInferenceJobComputeTargetArgs
    The compute Target to use
    description str
    Description of the Compute Target
    environment Mapping[str, str]
    Environment to set for Job
    inputs Sequence[AiInferenceJobInputArgs]
    Input data. Can have multiple
    labels Sequence[str]
    model AiInferenceJobModelArgs
    The model to use
    name str
    The name of Compute Environment
    outputs Sequence[AiInferenceJobOutputArgs]
    Output data. Can have multiple
    timeout float
    How long the job should run max.
    endpoint String
    The AI Inference instance endpoint
    aiInferenceJobId String
    commandArgs List<String>
    Arguments to use for job
    computeTarget Property Map
    The compute Target to use
    description String
    Description of the Compute Target
    environment Map<String>
    Environment to set for Job
    inputs List<Property Map>
    Input data. Can have multiple
    labels List<String>
    model Property Map
    The model to use
    name String
    The name of Compute Environment
    outputs List<Property Map>
    Output data. Can have multiple
    timeout Number
    How long the job should run max.

    Outputs

    All input properties are implicitly available as output properties. Additionally, the AiInferenceJob resource produces the following output properties:

    Completed string
    Created string
    CreatedBy string
    Duration double
    Id string
    The provider-assigned unique ID for this managed resource.
    Reference string
    Status string
    StatusMessage string
    Completed string
    Created string
    CreatedBy string
    Duration float64
    Id string
    The provider-assigned unique ID for this managed resource.
    Reference string
    Status string
    StatusMessage string
    completed String
    created String
    createdBy String
    duration Double
    id String
    The provider-assigned unique ID for this managed resource.
    reference String
    status String
    statusMessage String
    completed string
    created string
    createdBy string
    duration number
    id string
    The provider-assigned unique ID for this managed resource.
    reference string
    status string
    statusMessage string
    completed str
    created str
    created_by str
    duration float
    id str
    The provider-assigned unique ID for this managed resource.
    reference str
    status str
    status_message str
    completed String
    created String
    createdBy String
    duration Number
    id String
    The provider-assigned unique ID for this managed resource.
    reference String
    status String
    statusMessage String

    Look up Existing AiInferenceJob Resource

    Get an existing AiInferenceJob resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

    public static get(name: string, id: Input<ID>, state?: AiInferenceJobState, opts?: CustomResourceOptions): AiInferenceJob
    @staticmethod
    def get(resource_name: str,
            id: str,
            opts: Optional[ResourceOptions] = None,
            ai_inference_job_id: Optional[str] = None,
            command_args: Optional[Sequence[str]] = None,
            completed: Optional[str] = None,
            compute_target: Optional[AiInferenceJobComputeTargetArgs] = None,
            created: Optional[str] = None,
            created_by: Optional[str] = None,
            description: Optional[str] = None,
            duration: Optional[float] = None,
            endpoint: Optional[str] = None,
            environment: Optional[Mapping[str, str]] = None,
            inputs: Optional[Sequence[AiInferenceJobInputArgs]] = None,
            labels: Optional[Sequence[str]] = None,
            model: Optional[AiInferenceJobModelArgs] = None,
            name: Optional[str] = None,
            outputs: Optional[Sequence[AiInferenceJobOutputArgs]] = None,
            reference: Optional[str] = None,
            status: Optional[str] = None,
            status_message: Optional[str] = None,
            timeout: Optional[float] = None) -> AiInferenceJob
    func GetAiInferenceJob(ctx *Context, name string, id IDInput, state *AiInferenceJobState, opts ...ResourceOption) (*AiInferenceJob, error)
    public static AiInferenceJob Get(string name, Input<string> id, AiInferenceJobState? state, CustomResourceOptions? opts = null)
    public static AiInferenceJob get(String name, Output<String> id, AiInferenceJobState state, CustomResourceOptions options)
    resources:
      _:
        type: hsdp:AiInferenceJob
        get:
          id: ${id}
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    resource_name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    The following state arguments are supported:
    AiInferenceJobId string
    CommandArgs List<string>
    Arguments to use for job
    Completed string
    ComputeTarget AiInferenceJobComputeTarget
    The compute Target to use
    Created string
    CreatedBy string
    Description string
    Description of the Compute Target
    Duration double
    Endpoint string
    The AI Inference instance endpoint
    Environment Dictionary<string, string>
    Environment to set for Job
    Inputs List<AiInferenceJobInput>
    Input data. Can have multiple
    Labels List<string>
    Model AiInferenceJobModel
    The model to use
    Name string
    The name of Compute Environment
    Outputs List<AiInferenceJobOutput>
    Output data. Can have multiple
    Reference string
    Status string
    StatusMessage string
    Timeout double
    How long the job should run max.
    AiInferenceJobId string
    CommandArgs []string
    Arguments to use for job
    Completed string
    ComputeTarget AiInferenceJobComputeTargetArgs
    The compute Target to use
    Created string
    CreatedBy string
    Description string
    Description of the Compute Target
    Duration float64
    Endpoint string
    The AI Inference instance endpoint
    Environment map[string]string
    Environment to set for Job
    Inputs []AiInferenceJobInputTypeArgs
    Input data. Can have multiple
    Labels []string
    Model AiInferenceJobModelArgs
    The model to use
    Name string
    The name of Compute Environment
    Outputs []AiInferenceJobOutputTypeArgs
    Output data. Can have multiple
    Reference string
    Status string
    StatusMessage string
    Timeout float64
    How long the job should run max.
    aiInferenceJobId String
    commandArgs List<String>
    Arguments to use for job
    completed String
    computeTarget AiInferenceJobComputeTarget
    The compute Target to use
    created String
    createdBy String
    description String
    Description of the Compute Target
    duration Double
    endpoint String
    The AI Inference instance endpoint
    environment Map<String,String>
    Environment to set for Job
    inputs List<AiInferenceJobInput>
    Input data. Can have multiple
    labels List<String>
    model AiInferenceJobModel
    The model to use
    name String
    The name of Compute Environment
    outputs List<AiInferenceJobOutput>
    Output data. Can have multiple
    reference String
    status String
    statusMessage String
    timeout Double
    How long the job should run max.
    aiInferenceJobId string
    commandArgs string[]
    Arguments to use for job
    completed string
    computeTarget AiInferenceJobComputeTarget
    The compute Target to use
    created string
    createdBy string
    description string
    Description of the Compute Target
    duration number
    endpoint string
    The AI Inference instance endpoint
    environment {[key: string]: string}
    Environment to set for Job
    inputs AiInferenceJobInput[]
    Input data. Can have multiple
    labels string[]
    model AiInferenceJobModel
    The model to use
    name string
    The name of Compute Environment
    outputs AiInferenceJobOutput[]
    Output data. Can have multiple
    reference string
    status string
    statusMessage string
    timeout number
    How long the job should run max.
    ai_inference_job_id str
    command_args Sequence[str]
    Arguments to use for job
    completed str
    compute_target AiInferenceJobComputeTargetArgs
    The compute Target to use
    created str
    created_by str
    description str
    Description of the Compute Target
    duration float
    endpoint str
    The AI Inference instance endpoint
    environment Mapping[str, str]
    Environment to set for Job
    inputs Sequence[AiInferenceJobInputArgs]
    Input data. Can have multiple
    labels Sequence[str]
    model AiInferenceJobModelArgs
    The model to use
    name str
    The name of Compute Environment
    outputs Sequence[AiInferenceJobOutputArgs]
    Output data. Can have multiple
    reference str
    status str
    status_message str
    timeout float
    How long the job should run max.
    aiInferenceJobId String
    commandArgs List<String>
    Arguments to use for job
    completed String
    computeTarget Property Map
    The compute Target to use
    created String
    createdBy String
    description String
    Description of the Compute Target
    duration Number
    endpoint String
    The AI Inference instance endpoint
    environment Map<String>
    Environment to set for Job
    inputs List<Property Map>
    Input data. Can have multiple
    labels List<String>
    model Property Map
    The model to use
    name String
    The name of Compute Environment
    outputs List<Property Map>
    Output data. Can have multiple
    reference String
    status String
    statusMessage String
    timeout Number
    How long the job should run max.

    Supporting Types

    AiInferenceJobComputeTarget, AiInferenceJobComputeTargetArgs

    Identifier string
    Reference string
    The reference of the Compute Target
    Identifier string
    Reference string
    The reference of the Compute Target
    identifier String
    reference String
    The reference of the Compute Target
    identifier string
    reference string
    The reference of the Compute Target
    identifier str
    reference str
    The reference of the Compute Target
    identifier String
    reference String
    The reference of the Compute Target

    AiInferenceJobInput, AiInferenceJobInputArgs

    Url string
    URL pointing to the input
    Name string
    Name of the input
    Url string
    URL pointing to the input
    Name string
    Name of the input
    url String
    URL pointing to the input
    name String
    Name of the input
    url string
    URL pointing to the input
    name string
    Name of the input
    url str
    URL pointing to the input
    name str
    Name of the input
    url String
    URL pointing to the input
    name String
    Name of the input

    AiInferenceJobModel, AiInferenceJobModelArgs

    Identifier string
    Reference string
    The reference of the Inference module
    Identifier string
    Reference string
    The reference of the Inference module
    identifier String
    reference String
    The reference of the Inference module
    identifier string
    reference string
    The reference of the Inference module
    identifier str
    reference str
    The reference of the Inference module
    identifier String
    reference String
    The reference of the Inference module

    AiInferenceJobOutput, AiInferenceJobOutputArgs

    Url string
    URL pointing to the output
    Name string
    Name of the output
    Url string
    URL pointing to the output
    Name string
    Name of the output
    url String
    URL pointing to the output
    name String
    Name of the output
    url string
    URL pointing to the output
    name string
    Name of the output
    url str
    URL pointing to the output
    name str
    Name of the output
    url String
    URL pointing to the output
    name String
    Name of the output

    Import

    An existing AI Inference Job can be imported using its GUID, e.g.
    

    bash

    $ pulumi import hsdp:index/aiInferenceJob:AiInferenceJob target a-guid
    

    To learn more about importing existing cloud resources, see Importing resources.

    Package Details

    Repository
    hsdp philips-software/terraform-provider-hsdp
    License
    Notes
    This Pulumi package is based on the hsdp Terraform Provider.
    hsdp logo
    hsdp 0.65.3 published on Tuesday, Apr 15, 2025 by philips-software