hsdp.AiInferenceJob
Explore with Pulumi AI
Manages HSDP AI Inference Jobs
Example Usage
import * as pulumi from "@pulumi/pulumi";
import * as hsdp from "@pulumi/hsdp";
const job = new hsdp.AiInferenceJob("job", {
endpoint: data.hsdp_ai_inference_service_instance.inference.endpoint,
description: "Long running Inference Job",
timeout: 60,
model: {
reference: hsdp_ai_inference_model.model.reference,
},
computeTarget: {
reference: hsdp_ai_inference_compute_target.target.reference,
},
inputs: [{
name: "train",
url: "s3://input-sagemaker-64q6eey/data/input",
}],
outputs: [{
name: "train",
url: "s3://input-sagemaker-64q6eey/data/prediction",
}],
environment: {
FOO: "bar",
BAR: "baz",
},
commandArgs: [
"-f",
"abc",
],
labels: [
"BONEAGE",
"CNN",
],
});
import pulumi
import pulumi_hsdp as hsdp
job = hsdp.AiInferenceJob("job",
endpoint=data["hsdp_ai_inference_service_instance"]["inference"]["endpoint"],
description="Long running Inference Job",
timeout=60,
model={
"reference": hsdp_ai_inference_model["model"]["reference"],
},
compute_target={
"reference": hsdp_ai_inference_compute_target["target"]["reference"],
},
inputs=[{
"name": "train",
"url": "s3://input-sagemaker-64q6eey/data/input",
}],
outputs=[{
"name": "train",
"url": "s3://input-sagemaker-64q6eey/data/prediction",
}],
environment={
"FOO": "bar",
"BAR": "baz",
},
command_args=[
"-f",
"abc",
],
labels=[
"BONEAGE",
"CNN",
])
package main
import (
"github.com/pulumi/pulumi-terraform-provider/sdks/go/hsdp/hsdp"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
_, err := hsdp.NewAiInferenceJob(ctx, "job", &hsdp.AiInferenceJobArgs{
Endpoint: pulumi.Any(data.Hsdp_ai_inference_service_instance.Inference.Endpoint),
Description: pulumi.String("Long running Inference Job"),
Timeout: pulumi.Float64(60),
Model: &hsdp.AiInferenceJobModelArgs{
Reference: pulumi.Any(hsdp_ai_inference_model.Model.Reference),
},
ComputeTarget: &hsdp.AiInferenceJobComputeTargetArgs{
Reference: pulumi.Any(hsdp_ai_inference_compute_target.Target.Reference),
},
Inputs: hsdp.AiInferenceJobInputTypeArray{
&hsdp.AiInferenceJobInputTypeArgs{
Name: pulumi.String("train"),
Url: pulumi.String("s3://input-sagemaker-64q6eey/data/input"),
},
},
Outputs: hsdp.AiInferenceJobOutputTypeArray{
&hsdp.AiInferenceJobOutputTypeArgs{
Name: pulumi.String("train"),
Url: pulumi.String("s3://input-sagemaker-64q6eey/data/prediction"),
},
},
Environment: pulumi.StringMap{
"FOO": pulumi.String("bar"),
"BAR": pulumi.String("baz"),
},
CommandArgs: pulumi.StringArray{
pulumi.String("-f"),
pulumi.String("abc"),
},
Labels: pulumi.StringArray{
pulumi.String("BONEAGE"),
pulumi.String("CNN"),
},
})
if err != nil {
return err
}
return nil
})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Hsdp = Pulumi.Hsdp;
return await Deployment.RunAsync(() =>
{
var job = new Hsdp.AiInferenceJob("job", new()
{
Endpoint = data.Hsdp_ai_inference_service_instance.Inference.Endpoint,
Description = "Long running Inference Job",
Timeout = 60,
Model = new Hsdp.Inputs.AiInferenceJobModelArgs
{
Reference = hsdp_ai_inference_model.Model.Reference,
},
ComputeTarget = new Hsdp.Inputs.AiInferenceJobComputeTargetArgs
{
Reference = hsdp_ai_inference_compute_target.Target.Reference,
},
Inputs = new[]
{
new Hsdp.Inputs.AiInferenceJobInputArgs
{
Name = "train",
Url = "s3://input-sagemaker-64q6eey/data/input",
},
},
Outputs = new[]
{
new Hsdp.Inputs.AiInferenceJobOutputArgs
{
Name = "train",
Url = "s3://input-sagemaker-64q6eey/data/prediction",
},
},
Environment =
{
{ "FOO", "bar" },
{ "BAR", "baz" },
},
CommandArgs = new[]
{
"-f",
"abc",
},
Labels = new[]
{
"BONEAGE",
"CNN",
},
});
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.hsdp.AiInferenceJob;
import com.pulumi.hsdp.AiInferenceJobArgs;
import com.pulumi.hsdp.inputs.AiInferenceJobModelArgs;
import com.pulumi.hsdp.inputs.AiInferenceJobComputeTargetArgs;
import com.pulumi.hsdp.inputs.AiInferenceJobInputArgs;
import com.pulumi.hsdp.inputs.AiInferenceJobOutputArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var job = new AiInferenceJob("job", AiInferenceJobArgs.builder()
.endpoint(data.hsdp_ai_inference_service_instance().inference().endpoint())
.description("Long running Inference Job")
.timeout(60)
.model(AiInferenceJobModelArgs.builder()
.reference(hsdp_ai_inference_model.model().reference())
.build())
.computeTarget(AiInferenceJobComputeTargetArgs.builder()
.reference(hsdp_ai_inference_compute_target.target().reference())
.build())
.inputs(AiInferenceJobInputArgs.builder()
.name("train")
.url("s3://input-sagemaker-64q6eey/data/input")
.build())
.outputs(AiInferenceJobOutputArgs.builder()
.name("train")
.url("s3://input-sagemaker-64q6eey/data/prediction")
.build())
.environment(Map.ofEntries(
Map.entry("FOO", "bar"),
Map.entry("BAR", "baz")
))
.commandArgs(
"-f",
"abc")
.labels(
"BONEAGE",
"CNN")
.build());
}
}
resources:
job:
type: hsdp:AiInferenceJob
properties:
endpoint: ${data.hsdp_ai_inference_service_instance.inference.endpoint}
description: Long running Inference Job
timeout: 60
model:
reference: ${hsdp_ai_inference_model.model.reference}
computeTarget:
reference: ${hsdp_ai_inference_compute_target.target.reference}
inputs:
- name: train
url: s3://input-sagemaker-64q6eey/data/input
outputs:
- name: train
url: s3://input-sagemaker-64q6eey/data/prediction
environment:
FOO: bar
BAR: baz
commandArgs:
- -f
- abc
labels:
- BONEAGE
- CNN
Attributes reference
In addition to all arguments above, the following attributes are exported:
id
- The GUID of the job
reference
- The reference of this job
created
- The date this job was created
created_by
- Who created the environment
completed
- When the job was completed
duration
- How long (seconds) the job ran for
status
- The status of the job
status_message
- The status message, if available
Create AiInferenceJob Resource
Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.
Constructor syntax
new AiInferenceJob(name: string, args: AiInferenceJobArgs, opts?: CustomResourceOptions);
@overload
def AiInferenceJob(resource_name: str,
args: AiInferenceJobArgs,
opts: Optional[ResourceOptions] = None)
@overload
def AiInferenceJob(resource_name: str,
opts: Optional[ResourceOptions] = None,
endpoint: Optional[str] = None,
ai_inference_job_id: Optional[str] = None,
command_args: Optional[Sequence[str]] = None,
compute_target: Optional[AiInferenceJobComputeTargetArgs] = None,
description: Optional[str] = None,
environment: Optional[Mapping[str, str]] = None,
inputs: Optional[Sequence[AiInferenceJobInputArgs]] = None,
labels: Optional[Sequence[str]] = None,
model: Optional[AiInferenceJobModelArgs] = None,
name: Optional[str] = None,
outputs: Optional[Sequence[AiInferenceJobOutputArgs]] = None,
timeout: Optional[float] = None)
func NewAiInferenceJob(ctx *Context, name string, args AiInferenceJobArgs, opts ...ResourceOption) (*AiInferenceJob, error)
public AiInferenceJob(string name, AiInferenceJobArgs args, CustomResourceOptions? opts = null)
public AiInferenceJob(String name, AiInferenceJobArgs args)
public AiInferenceJob(String name, AiInferenceJobArgs args, CustomResourceOptions options)
type: hsdp:AiInferenceJob
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
Parameters
- name string
- The unique name of the resource.
- args AiInferenceJobArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- resource_name str
- The unique name of the resource.
- args AiInferenceJobArgs
- The arguments to resource properties.
- opts ResourceOptions
- Bag of options to control resource's behavior.
- ctx Context
- Context object for the current deployment.
- name string
- The unique name of the resource.
- args AiInferenceJobArgs
- The arguments to resource properties.
- opts ResourceOption
- Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args AiInferenceJobArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- name String
- The unique name of the resource.
- args AiInferenceJobArgs
- The arguments to resource properties.
- options CustomResourceOptions
- Bag of options to control resource's behavior.
Constructor example
The following reference example uses placeholder values for all input properties.
var aiInferenceJobResource = new Hsdp.AiInferenceJob("aiInferenceJobResource", new()
{
Endpoint = "string",
AiInferenceJobId = "string",
CommandArgs = new[]
{
"string",
},
ComputeTarget = new Hsdp.Inputs.AiInferenceJobComputeTargetArgs
{
Identifier = "string",
Reference = "string",
},
Description = "string",
Environment =
{
{ "string", "string" },
},
Inputs = new[]
{
new Hsdp.Inputs.AiInferenceJobInputArgs
{
Url = "string",
Name = "string",
},
},
Labels = new[]
{
"string",
},
Model = new Hsdp.Inputs.AiInferenceJobModelArgs
{
Identifier = "string",
Reference = "string",
},
Name = "string",
Outputs = new[]
{
new Hsdp.Inputs.AiInferenceJobOutputArgs
{
Url = "string",
Name = "string",
},
},
Timeout = 0,
});
example, err := hsdp.NewAiInferenceJob(ctx, "aiInferenceJobResource", &hsdp.AiInferenceJobArgs{
Endpoint: pulumi.String("string"),
AiInferenceJobId: pulumi.String("string"),
CommandArgs: pulumi.StringArray{
pulumi.String("string"),
},
ComputeTarget: &hsdp.AiInferenceJobComputeTargetArgs{
Identifier: pulumi.String("string"),
Reference: pulumi.String("string"),
},
Description: pulumi.String("string"),
Environment: pulumi.StringMap{
"string": pulumi.String("string"),
},
Inputs: hsdp.AiInferenceJobInputTypeArray{
&hsdp.AiInferenceJobInputTypeArgs{
Url: pulumi.String("string"),
Name: pulumi.String("string"),
},
},
Labels: pulumi.StringArray{
pulumi.String("string"),
},
Model: &hsdp.AiInferenceJobModelArgs{
Identifier: pulumi.String("string"),
Reference: pulumi.String("string"),
},
Name: pulumi.String("string"),
Outputs: hsdp.AiInferenceJobOutputTypeArray{
&hsdp.AiInferenceJobOutputTypeArgs{
Url: pulumi.String("string"),
Name: pulumi.String("string"),
},
},
Timeout: pulumi.Float64(0),
})
var aiInferenceJobResource = new AiInferenceJob("aiInferenceJobResource", AiInferenceJobArgs.builder()
.endpoint("string")
.aiInferenceJobId("string")
.commandArgs("string")
.computeTarget(AiInferenceJobComputeTargetArgs.builder()
.identifier("string")
.reference("string")
.build())
.description("string")
.environment(Map.of("string", "string"))
.inputs(AiInferenceJobInputArgs.builder()
.url("string")
.name("string")
.build())
.labels("string")
.model(AiInferenceJobModelArgs.builder()
.identifier("string")
.reference("string")
.build())
.name("string")
.outputs(AiInferenceJobOutputArgs.builder()
.url("string")
.name("string")
.build())
.timeout(0)
.build());
ai_inference_job_resource = hsdp.AiInferenceJob("aiInferenceJobResource",
endpoint="string",
ai_inference_job_id="string",
command_args=["string"],
compute_target={
"identifier": "string",
"reference": "string",
},
description="string",
environment={
"string": "string",
},
inputs=[{
"url": "string",
"name": "string",
}],
labels=["string"],
model={
"identifier": "string",
"reference": "string",
},
name="string",
outputs=[{
"url": "string",
"name": "string",
}],
timeout=0)
const aiInferenceJobResource = new hsdp.AiInferenceJob("aiInferenceJobResource", {
endpoint: "string",
aiInferenceJobId: "string",
commandArgs: ["string"],
computeTarget: {
identifier: "string",
reference: "string",
},
description: "string",
environment: {
string: "string",
},
inputs: [{
url: "string",
name: "string",
}],
labels: ["string"],
model: {
identifier: "string",
reference: "string",
},
name: "string",
outputs: [{
url: "string",
name: "string",
}],
timeout: 0,
});
type: hsdp:AiInferenceJob
properties:
aiInferenceJobId: string
commandArgs:
- string
computeTarget:
identifier: string
reference: string
description: string
endpoint: string
environment:
string: string
inputs:
- name: string
url: string
labels:
- string
model:
identifier: string
reference: string
name: string
outputs:
- name: string
url: string
timeout: 0
AiInferenceJob Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
The AiInferenceJob resource accepts the following input properties:
- Endpoint string
- The AI Inference instance endpoint
- Ai
Inference stringJob Id - Command
Args List<string> - Arguments to use for job
- Compute
Target AiInference Job Compute Target - The compute Target to use
- Description string
- Description of the Compute Target
- Environment Dictionary<string, string>
- Environment to set for Job
- Inputs
List<Ai
Inference Job Input> - Input data. Can have multiple
- Labels List<string>
- Model
Ai
Inference Job Model - The model to use
- Name string
- The name of Compute Environment
- Outputs
List<Ai
Inference Job Output> - Output data. Can have multiple
- Timeout double
- How long the job should run max.
- Endpoint string
- The AI Inference instance endpoint
- Ai
Inference stringJob Id - Command
Args []string - Arguments to use for job
- Compute
Target AiInference Job Compute Target Args - The compute Target to use
- Description string
- Description of the Compute Target
- Environment map[string]string
- Environment to set for Job
- Inputs
[]Ai
Inference Job Input Type Args - Input data. Can have multiple
- Labels []string
- Model
Ai
Inference Job Model Args - The model to use
- Name string
- The name of Compute Environment
- Outputs
[]Ai
Inference Job Output Type Args - Output data. Can have multiple
- Timeout float64
- How long the job should run max.
- endpoint String
- The AI Inference instance endpoint
- ai
Inference StringJob Id - command
Args List<String> - Arguments to use for job
- compute
Target AiInference Job Compute Target - The compute Target to use
- description String
- Description of the Compute Target
- environment Map<String,String>
- Environment to set for Job
- inputs
List<Ai
Inference Job Input> - Input data. Can have multiple
- labels List<String>
- model
Ai
Inference Job Model - The model to use
- name String
- The name of Compute Environment
- outputs
List<Ai
Inference Job Output> - Output data. Can have multiple
- timeout Double
- How long the job should run max.
- endpoint string
- The AI Inference instance endpoint
- ai
Inference stringJob Id - command
Args string[] - Arguments to use for job
- compute
Target AiInference Job Compute Target - The compute Target to use
- description string
- Description of the Compute Target
- environment {[key: string]: string}
- Environment to set for Job
- inputs
Ai
Inference Job Input[] - Input data. Can have multiple
- labels string[]
- model
Ai
Inference Job Model - The model to use
- name string
- The name of Compute Environment
- outputs
Ai
Inference Job Output[] - Output data. Can have multiple
- timeout number
- How long the job should run max.
- endpoint str
- The AI Inference instance endpoint
- ai_
inference_ strjob_ id - command_
args Sequence[str] - Arguments to use for job
- compute_
target AiInference Job Compute Target Args - The compute Target to use
- description str
- Description of the Compute Target
- environment Mapping[str, str]
- Environment to set for Job
- inputs
Sequence[Ai
Inference Job Input Args] - Input data. Can have multiple
- labels Sequence[str]
- model
Ai
Inference Job Model Args - The model to use
- name str
- The name of Compute Environment
- outputs
Sequence[Ai
Inference Job Output Args] - Output data. Can have multiple
- timeout float
- How long the job should run max.
- endpoint String
- The AI Inference instance endpoint
- ai
Inference StringJob Id - command
Args List<String> - Arguments to use for job
- compute
Target Property Map - The compute Target to use
- description String
- Description of the Compute Target
- environment Map<String>
- Environment to set for Job
- inputs List<Property Map>
- Input data. Can have multiple
- labels List<String>
- model Property Map
- The model to use
- name String
- The name of Compute Environment
- outputs List<Property Map>
- Output data. Can have multiple
- timeout Number
- How long the job should run max.
Outputs
All input properties are implicitly available as output properties. Additionally, the AiInferenceJob resource produces the following output properties:
- completed str
- created str
- created_
by str - duration float
- id str
- The provider-assigned unique ID for this managed resource.
- reference str
- status str
- status_
message str
Look up Existing AiInferenceJob Resource
Get an existing AiInferenceJob resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: AiInferenceJobState, opts?: CustomResourceOptions): AiInferenceJob
@staticmethod
def get(resource_name: str,
id: str,
opts: Optional[ResourceOptions] = None,
ai_inference_job_id: Optional[str] = None,
command_args: Optional[Sequence[str]] = None,
completed: Optional[str] = None,
compute_target: Optional[AiInferenceJobComputeTargetArgs] = None,
created: Optional[str] = None,
created_by: Optional[str] = None,
description: Optional[str] = None,
duration: Optional[float] = None,
endpoint: Optional[str] = None,
environment: Optional[Mapping[str, str]] = None,
inputs: Optional[Sequence[AiInferenceJobInputArgs]] = None,
labels: Optional[Sequence[str]] = None,
model: Optional[AiInferenceJobModelArgs] = None,
name: Optional[str] = None,
outputs: Optional[Sequence[AiInferenceJobOutputArgs]] = None,
reference: Optional[str] = None,
status: Optional[str] = None,
status_message: Optional[str] = None,
timeout: Optional[float] = None) -> AiInferenceJob
func GetAiInferenceJob(ctx *Context, name string, id IDInput, state *AiInferenceJobState, opts ...ResourceOption) (*AiInferenceJob, error)
public static AiInferenceJob Get(string name, Input<string> id, AiInferenceJobState? state, CustomResourceOptions? opts = null)
public static AiInferenceJob get(String name, Output<String> id, AiInferenceJobState state, CustomResourceOptions options)
resources: _: type: hsdp:AiInferenceJob get: id: ${id}
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- resource_name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- Ai
Inference stringJob Id - Command
Args List<string> - Arguments to use for job
- Completed string
- Compute
Target AiInference Job Compute Target - The compute Target to use
- Created string
- Created
By string - Description string
- Description of the Compute Target
- Duration double
- Endpoint string
- The AI Inference instance endpoint
- Environment Dictionary<string, string>
- Environment to set for Job
- Inputs
List<Ai
Inference Job Input> - Input data. Can have multiple
- Labels List<string>
- Model
Ai
Inference Job Model - The model to use
- Name string
- The name of Compute Environment
- Outputs
List<Ai
Inference Job Output> - Output data. Can have multiple
- Reference string
- Status string
- Status
Message string - Timeout double
- How long the job should run max.
- Ai
Inference stringJob Id - Command
Args []string - Arguments to use for job
- Completed string
- Compute
Target AiInference Job Compute Target Args - The compute Target to use
- Created string
- Created
By string - Description string
- Description of the Compute Target
- Duration float64
- Endpoint string
- The AI Inference instance endpoint
- Environment map[string]string
- Environment to set for Job
- Inputs
[]Ai
Inference Job Input Type Args - Input data. Can have multiple
- Labels []string
- Model
Ai
Inference Job Model Args - The model to use
- Name string
- The name of Compute Environment
- Outputs
[]Ai
Inference Job Output Type Args - Output data. Can have multiple
- Reference string
- Status string
- Status
Message string - Timeout float64
- How long the job should run max.
- ai
Inference StringJob Id - command
Args List<String> - Arguments to use for job
- completed String
- compute
Target AiInference Job Compute Target - The compute Target to use
- created String
- created
By String - description String
- Description of the Compute Target
- duration Double
- endpoint String
- The AI Inference instance endpoint
- environment Map<String,String>
- Environment to set for Job
- inputs
List<Ai
Inference Job Input> - Input data. Can have multiple
- labels List<String>
- model
Ai
Inference Job Model - The model to use
- name String
- The name of Compute Environment
- outputs
List<Ai
Inference Job Output> - Output data. Can have multiple
- reference String
- status String
- status
Message String - timeout Double
- How long the job should run max.
- ai
Inference stringJob Id - command
Args string[] - Arguments to use for job
- completed string
- compute
Target AiInference Job Compute Target - The compute Target to use
- created string
- created
By string - description string
- Description of the Compute Target
- duration number
- endpoint string
- The AI Inference instance endpoint
- environment {[key: string]: string}
- Environment to set for Job
- inputs
Ai
Inference Job Input[] - Input data. Can have multiple
- labels string[]
- model
Ai
Inference Job Model - The model to use
- name string
- The name of Compute Environment
- outputs
Ai
Inference Job Output[] - Output data. Can have multiple
- reference string
- status string
- status
Message string - timeout number
- How long the job should run max.
- ai_
inference_ strjob_ id - command_
args Sequence[str] - Arguments to use for job
- completed str
- compute_
target AiInference Job Compute Target Args - The compute Target to use
- created str
- created_
by str - description str
- Description of the Compute Target
- duration float
- endpoint str
- The AI Inference instance endpoint
- environment Mapping[str, str]
- Environment to set for Job
- inputs
Sequence[Ai
Inference Job Input Args] - Input data. Can have multiple
- labels Sequence[str]
- model
Ai
Inference Job Model Args - The model to use
- name str
- The name of Compute Environment
- outputs
Sequence[Ai
Inference Job Output Args] - Output data. Can have multiple
- reference str
- status str
- status_
message str - timeout float
- How long the job should run max.
- ai
Inference StringJob Id - command
Args List<String> - Arguments to use for job
- completed String
- compute
Target Property Map - The compute Target to use
- created String
- created
By String - description String
- Description of the Compute Target
- duration Number
- endpoint String
- The AI Inference instance endpoint
- environment Map<String>
- Environment to set for Job
- inputs List<Property Map>
- Input data. Can have multiple
- labels List<String>
- model Property Map
- The model to use
- name String
- The name of Compute Environment
- outputs List<Property Map>
- Output data. Can have multiple
- reference String
- status String
- status
Message String - timeout Number
- How long the job should run max.
Supporting Types
AiInferenceJobComputeTarget, AiInferenceJobComputeTargetArgs
- Identifier string
- Reference string
- The reference of the Compute Target
- Identifier string
- Reference string
- The reference of the Compute Target
- identifier String
- reference String
- The reference of the Compute Target
- identifier string
- reference string
- The reference of the Compute Target
- identifier str
- reference str
- The reference of the Compute Target
- identifier String
- reference String
- The reference of the Compute Target
AiInferenceJobInput, AiInferenceJobInputArgs
AiInferenceJobModel, AiInferenceJobModelArgs
- Identifier string
- Reference string
- The reference of the Inference module
- Identifier string
- Reference string
- The reference of the Inference module
- identifier String
- reference String
- The reference of the Inference module
- identifier string
- reference string
- The reference of the Inference module
- identifier str
- reference str
- The reference of the Inference module
- identifier String
- reference String
- The reference of the Inference module
AiInferenceJobOutput, AiInferenceJobOutputArgs
Import
An existing AI Inference Job can be imported using its GUID, e.g.
bash
$ pulumi import hsdp:index/aiInferenceJob:AiInferenceJob target a-guid
To learn more about importing existing cloud resources, see Importing resources.
Package Details
- Repository
- hsdp philips-software/terraform-provider-hsdp
- License
- Notes
- This Pulumi package is based on the
hsdp
Terraform Provider.