hsdp.AiInferenceModel
Explore with Pulumi AI
Manages HSDP AI Inference models.
Example Usage
// Example: register an AI Inference model with HSDP (TypeScript).
import * as pulumi from "@pulumi/pulumi";
import * as hsdp from "@pulumi/hsdp";
// Discover the regional endpoint of the "inference" service.
const inferenceConfig = hsdp.getConfig({
service: "inference",
});
// Look up the AI Inference service instance for the tenant organization.
// NOTE(review): `_var.inference_tenant_org_id` is a Terraform-to-Pulumi
// conversion artifact and is not a valid identifier — replace it with a
// Pulumi config value (e.g. `new pulumi.Config().require(...)`) before running.
const inferenceAiInferenceServiceInstance = inferenceConfig.then(inferenceConfig => hsdp.getAiInferenceServiceInstance({
baseUrl: inferenceConfig.url,
organizationId: _var.inference_tenant_org_id,
}));
// Create the model against the discovered service instance endpoint.
const model = new hsdp.AiInferenceModel("model", {
endpoint: inferenceAiInferenceServiceInstance.then(inferenceAiInferenceServiceInstance => inferenceAiInferenceServiceInstance.endpoint),
version: "v1",
description: "Test model",
// Compute environment the model trains/runs in (reference + identifier).
computeEnvironment: {
reference: "foo",
identifier: "bar",
},
// Git location of the model source; sshKey is the deploy key used to clone.
sourceCode: {
url: "git@github.com:testuser/source.git",
branch: "main",
commitId: "e1f9366",
sshKey: "...",
},
artifactPath: "git@github.com:testuser/example.git",
// Command(s) executed as the model entry point.
entryCommands: ["python main/train.py -s 134786"],
// Environment variables injected into the model's runtime.
environment: {
FOO: "bar",
BAR: "baz",
},
labels: ["CNN"],
// Provider-specific extra settings, passed through as a JSON string.
additionalConfiguration: "{\"Tags\": [ { \"Key\": \"name\",\"Value\": \"hsp\"}]}",
});
# Example: register an AI Inference model with HSDP (Python).
import pulumi
import pulumi_hsdp as hsdp
# Discover the regional endpoint of the "inference" service.
inference_config = hsdp.get_config(service="inference")
# Look up the AI Inference service instance for the tenant organization.
# NOTE(review): `var["inference_tenant_org_id"]` is a Terraform-conversion
# artifact — replace it with a pulumi.Config value before running.
inference_ai_inference_service_instance = hsdp.get_ai_inference_service_instance(base_url=inference_config.url,
organization_id=var["inference_tenant_org_id"])
# Create the model against the discovered service instance endpoint.
model = hsdp.AiInferenceModel("model",
endpoint=inference_ai_inference_service_instance.endpoint,
version="v1",
description="Test model",
# Compute environment the model trains/runs in (reference + identifier).
compute_environment={
"reference": "foo",
"identifier": "bar",
},
# Git location of the model source; ssh_key is the deploy key used to clone.
source_code={
"url": "git@github.com:testuser/source.git",
"branch": "main",
"commit_id": "e1f9366",
"ssh_key": "...",
},
artifact_path="git@github.com:testuser/example.git",
# Command(s) executed as the model entry point.
entry_commands=["python main/train.py -s 134786"],
# Environment variables injected into the model's runtime.
environment={
"FOO": "bar",
"BAR": "baz",
},
labels=["CNN"],
# Provider-specific extra settings, passed through as a JSON string.
additional_configuration="{\"Tags\": [ { \"Key\": \"name\",\"Value\": \"hsp\"}]}")
// Example: register an AI Inference model with HSDP (Go).
package main
import (
"github.com/pulumi/pulumi-terraform-provider/sdks/go/hsdp/hsdp"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
// Discover the regional endpoint of the "inference" service.
inferenceConfig, err := hsdp.GetConfig(ctx, &hsdp.GetConfigArgs{
Service: "inference",
}, nil)
if err != nil {
return err
}
// Look up the AI Inference service instance for the tenant organization.
// NOTE(review): `_var.Inference_tenant_org_id` is a Terraform-conversion
// artifact and will not compile — replace it with a value from the Pulumi
// config SDK before running.
inferenceAiInferenceServiceInstance, err := hsdp.GetAiInferenceServiceInstance(ctx, &hsdp.GetAiInferenceServiceInstanceArgs{
BaseUrl: inferenceConfig.Url,
OrganizationId: _var.Inference_tenant_org_id,
}, nil)
if err != nil {
return err
}
// Create the model against the discovered service instance endpoint.
_, err = hsdp.NewAiInferenceModel(ctx, "model", &hsdp.AiInferenceModelArgs{
Endpoint: pulumi.String(inferenceAiInferenceServiceInstance.Endpoint),
Version: pulumi.String("v1"),
Description: pulumi.String("Test model"),
// Compute environment the model trains/runs in (reference + identifier).
ComputeEnvironment: &hsdp.AiInferenceModelComputeEnvironmentArgs{
Reference: pulumi.String("foo"),
Identifier: pulumi.String("bar"),
},
// Git location of the model source; SshKey is the deploy key used to clone.
SourceCode: &hsdp.AiInferenceModelSourceCodeArgs{
Url: pulumi.String("git@github.com:testuser/source.git"),
Branch: pulumi.String("main"),
CommitId: pulumi.String("e1f9366"),
SshKey: pulumi.String("..."),
},
ArtifactPath: pulumi.String("git@github.com:testuser/example.git"),
// Command(s) executed as the model entry point.
EntryCommands: pulumi.StringArray{
pulumi.String("python main/train.py -s 134786"),
},
// Environment variables injected into the model's runtime.
Environment: pulumi.StringMap{
"FOO": pulumi.String("bar"),
"BAR": pulumi.String("baz"),
},
Labels: pulumi.StringArray{
pulumi.String("CNN"),
},
// Provider-specific extra settings, passed through as a JSON string.
AdditionalConfiguration: pulumi.String("{\"Tags\": [ { \"Key\": \"name\",\"Value\": \"hsp\"}]}"),
})
if err != nil {
return err
}
return nil
})
}
// Example: register an AI Inference model with HSDP (C#).
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Hsdp = Pulumi.Hsdp;
return await Deployment.RunAsync(() =>
{
// Discover the regional endpoint of the "inference" service.
var inferenceConfig = Hsdp.GetConfig.Invoke(new()
{
Service = "inference",
});
// Look up the AI Inference service instance for the tenant organization.
// NOTE(review): `@var.Inference_tenant_org_id` is a Terraform-conversion
// artifact — replace it with a Pulumi config value before running.
var inferenceAiInferenceServiceInstance = Hsdp.GetAiInferenceServiceInstance.Invoke(new()
{
BaseUrl = inferenceConfig.Apply(getConfigResult => getConfigResult.Url),
OrganizationId = @var.Inference_tenant_org_id,
});
// Create the model against the discovered service instance endpoint.
var model = new Hsdp.AiInferenceModel("model", new()
{
Endpoint = inferenceAiInferenceServiceInstance.Apply(getAiInferenceServiceInstanceResult => getAiInferenceServiceInstanceResult.Endpoint),
Version = "v1",
Description = "Test model",
// Compute environment the model trains/runs in (reference + identifier).
ComputeEnvironment = new Hsdp.Inputs.AiInferenceModelComputeEnvironmentArgs
{
Reference = "foo",
Identifier = "bar",
},
// Git location of the model source; SshKey is the deploy key used to clone.
SourceCode = new Hsdp.Inputs.AiInferenceModelSourceCodeArgs
{
Url = "git@github.com:testuser/source.git",
Branch = "main",
CommitId = "e1f9366",
SshKey = "...",
},
ArtifactPath = "git@github.com:testuser/example.git",
// Command(s) executed as the model entry point.
EntryCommands = new[]
{
"python main/train.py -s 134786",
},
// Environment variables injected into the model's runtime.
Environment =
{
{ "FOO", "bar" },
{ "BAR", "baz" },
},
Labels = new[]
{
"CNN",
},
// Provider-specific extra settings, passed through as a JSON string.
AdditionalConfiguration = "{\"Tags\": [ { \"Key\": \"name\",\"Value\": \"hsp\"}]}",
});
});
// Example: register an AI Inference model with HSDP (Java).
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.hsdp.HsdpFunctions;
import com.pulumi.hsdp.inputs.GetConfigArgs;
import com.pulumi.hsdp.inputs.GetAiInferenceServiceInstanceArgs;
import com.pulumi.hsdp.AiInferenceModel;
import com.pulumi.hsdp.AiInferenceModelArgs;
import com.pulumi.hsdp.inputs.AiInferenceModelComputeEnvironmentArgs;
import com.pulumi.hsdp.inputs.AiInferenceModelSourceCodeArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
// Discover the regional endpoint of the "inference" service.
final var inferenceConfig = HsdpFunctions.getConfig(GetConfigArgs.builder()
.service("inference")
.build());
// Look up the AI Inference service instance for the tenant organization.
// NOTE(review): `var_.inference_tenant_org_id()` is a Terraform-conversion
// artifact and will not compile — replace it with a Pulumi config value.
final var inferenceAiInferenceServiceInstance = HsdpFunctions.getAiInferenceServiceInstance(GetAiInferenceServiceInstanceArgs.builder()
.baseUrl(inferenceConfig.applyValue(getConfigResult -> getConfigResult.url()))
.organizationId(var_.inference_tenant_org_id())
.build());
// Create the model against the discovered service instance endpoint.
var model = new AiInferenceModel("model", AiInferenceModelArgs.builder()
.endpoint(inferenceAiInferenceServiceInstance.applyValue(getAiInferenceServiceInstanceResult -> getAiInferenceServiceInstanceResult.endpoint()))
.version("v1")
.description("Test model")
// Compute environment the model trains/runs in (reference + identifier).
.computeEnvironment(AiInferenceModelComputeEnvironmentArgs.builder()
.reference("foo")
.identifier("bar")
.build())
// Git location of the model source; sshKey is the deploy key used to clone.
.sourceCode(AiInferenceModelSourceCodeArgs.builder()
.url("git@github.com:testuser/source.git")
.branch("main")
.commitId("e1f9366")
.sshKey("...")
.build())
.artifactPath("git@github.com:testuser/example.git")
// Command(s) executed as the model entry point.
.entryCommands("python main/train.py -s 134786")
// Environment variables injected into the model's runtime.
.environment(Map.ofEntries(
Map.entry("FOO", "bar"),
Map.entry("BAR", "baz")
))
.labels("CNN")
// Provider-specific extra settings, passed through as a JSON string.
.additionalConfiguration("{\"Tags\": [ { \"Key\": \"name\",\"Value\": \"hsp\"}]}")
.build());
}
}
# Example: register an AI Inference model with HSDP (Pulumi YAML).
resources:
model:
type: hsdp:AiInferenceModel
properties:
# Endpoint comes from the service-instance lookup below.
endpoint: ${inferenceAiInferenceServiceInstance.endpoint}
version: v1
description: Test model
# Compute environment the model trains/runs in (reference + identifier).
computeEnvironment:
reference: foo
identifier: bar
# Git location of the model source; sshKey is the deploy key used to clone.
sourceCode:
url: git@github.com:testuser/source.git
branch: main
commitId: e1f9366
sshKey: '...'
artifactPath: git@github.com:testuser/example.git
# Command(s) executed as the model entry point.
entryCommands:
- python main/train.py -s 134786
# Environment variables injected into the model's runtime.
environment:
FOO: bar
BAR: baz
labels:
- CNN
# Provider-specific extra settings, passed through as a JSON string.
additionalConfiguration: '{"Tags": [ { "Key": "name","Value": "hsp"}]}'
variables:
# Discover the regional endpoint of the "inference" service.
inferenceConfig:
fn::invoke:
function: hsdp:getConfig
arguments:
service: inference
# Look up the AI Inference service instance for the tenant organization.
inferenceAiInferenceServiceInstance:
fn::invoke:
function: hsdp:getAiInferenceServiceInstance
arguments:
baseUrl: ${inferenceConfig.url}
organizationId: ${var.inference_tenant_org_id}
Create AiInferenceModel Resource
Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.
Constructor syntax
new AiInferenceModel(name: string, args: AiInferenceModelArgs, opts?: CustomResourceOptions);
@overload
def AiInferenceModel(resource_name: str,
args: AiInferenceModelArgs,
opts: Optional[ResourceOptions] = None)
@overload
def AiInferenceModel(resource_name: str,
opts: Optional[ResourceOptions] = None,
endpoint: Optional[str] = None,
version: Optional[str] = None,
additional_configuration: Optional[str] = None,
ai_inference_model_id: Optional[str] = None,
artifact_path: Optional[str] = None,
compute_environment: Optional[AiInferenceModelComputeEnvironmentArgs] = None,
description: Optional[str] = None,
entry_commands: Optional[Sequence[str]] = None,
environment: Optional[Mapping[str, str]] = None,
labels: Optional[Sequence[str]] = None,
name: Optional[str] = None,
source_code: Optional[AiInferenceModelSourceCodeArgs] = None)
func NewAiInferenceModel(ctx *Context, name string, args AiInferenceModelArgs, opts ...ResourceOption) (*AiInferenceModel, error)
public AiInferenceModel(string name, AiInferenceModelArgs args, CustomResourceOptions? opts = null)
public AiInferenceModel(String name, AiInferenceModelArgs args)
public AiInferenceModel(String name, AiInferenceModelArgs args, CustomResourceOptions options)
type: hsdp:AiInferenceModel
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
Parameters
- name string
- The unique name of the resource.
- args AiInferenceModelArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- resource_name str
- The unique name of the resource.
- args AiInferenceModelArgs
- The arguments to resource properties.
- opts ResourceOptions
- Bag of options to control resource's behavior.
- ctx Context
- Context object for the current deployment.
- name string
- The unique name of the resource.
- args AiInferenceModelArgs
- The arguments to resource properties.
- opts ResourceOption
- Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args AiInferenceModelArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- name String
- The unique name of the resource.
- args AiInferenceModelArgs
- The arguments to resource properties.
- options CustomResourceOptions
- Bag of options to control resource's behavior.
Constructor example
The following reference example uses placeholder values for all input properties.
// Reference example (C#): placeholder values for every AiInferenceModel input property.
var aiInferenceModelResource = new Hsdp.AiInferenceModel("aiInferenceModelResource", new()
{
Endpoint = "string",
Version = "string",
AdditionalConfiguration = "string",
AiInferenceModelId = "string",
ArtifactPath = "string",
ComputeEnvironment = new Hsdp.Inputs.AiInferenceModelComputeEnvironmentArgs
{
Identifier = "string",
Reference = "string",
},
Description = "string",
EntryCommands = new[]
{
"string",
},
Environment =
{
{ "string", "string" },
},
Labels = new[]
{
"string",
},
Name = "string",
SourceCode = new Hsdp.Inputs.AiInferenceModelSourceCodeArgs
{
Url = "string",
Branch = "string",
CommitId = "string",
SshKey = "string",
},
});
// Reference example (Go): placeholder values for every AiInferenceModel input property.
example, err := hsdp.NewAiInferenceModel(ctx, "aiInferenceModelResource", &hsdp.AiInferenceModelArgs{
Endpoint: pulumi.String("string"),
Version: pulumi.String("string"),
AdditionalConfiguration: pulumi.String("string"),
AiInferenceModelId: pulumi.String("string"),
ArtifactPath: pulumi.String("string"),
ComputeEnvironment: &hsdp.AiInferenceModelComputeEnvironmentArgs{
Identifier: pulumi.String("string"),
Reference: pulumi.String("string"),
},
Description: pulumi.String("string"),
EntryCommands: pulumi.StringArray{
pulumi.String("string"),
},
Environment: pulumi.StringMap{
"string": pulumi.String("string"),
},
Labels: pulumi.StringArray{
pulumi.String("string"),
},
Name: pulumi.String("string"),
SourceCode: &hsdp.AiInferenceModelSourceCodeArgs{
Url: pulumi.String("string"),
Branch: pulumi.String("string"),
CommitId: pulumi.String("e1f9366"),
SshKey: pulumi.String("string"),
},
})
// Reference example (Java): placeholder values for every AiInferenceModel input property.
var aiInferenceModelResource = new AiInferenceModel("aiInferenceModelResource", AiInferenceModelArgs.builder()
.endpoint("string")
.version("string")
.additionalConfiguration("string")
.aiInferenceModelId("string")
.artifactPath("string")
.computeEnvironment(AiInferenceModelComputeEnvironmentArgs.builder()
.identifier("string")
.reference("string")
.build())
.description("string")
.entryCommands("string")
.environment(Map.of("string", "string"))
.labels("string")
.name("string")
.sourceCode(AiInferenceModelSourceCodeArgs.builder()
.url("string")
.branch("string")
.commitId("string")
.sshKey("string")
.build())
.build());
# Reference example (Python): placeholder values for every AiInferenceModel input property.
ai_inference_model_resource = hsdp.AiInferenceModel("aiInferenceModelResource",
endpoint="string",
version="string",
additional_configuration="string",
ai_inference_model_id="string",
artifact_path="string",
compute_environment={
"identifier": "string",
"reference": "string",
},
description="string",
entry_commands=["string"],
environment={
"string": "string",
},
labels=["string"],
name="string",
source_code={
"url": "string",
"branch": "string",
"commit_id": "string",
"ssh_key": "string",
})
// Reference example (TypeScript): placeholder values for every AiInferenceModel input property.
const aiInferenceModelResource = new hsdp.AiInferenceModel("aiInferenceModelResource", {
endpoint: "string",
version: "string",
additionalConfiguration: "string",
aiInferenceModelId: "string",
artifactPath: "string",
computeEnvironment: {
identifier: "string",
reference: "string",
},
description: "string",
entryCommands: ["string"],
environment: {
string: "string",
},
labels: ["string"],
name: "string",
sourceCode: {
url: "string",
branch: "string",
commitId: "string",
sshKey: "string",
},
});
# Reference example (YAML): placeholder values for every AiInferenceModel input property.
type: hsdp:AiInferenceModel
properties:
additionalConfiguration: string
aiInferenceModelId: string
artifactPath: string
computeEnvironment:
identifier: string
reference: string
description: string
endpoint: string
entryCommands:
- string
environment:
string: string
labels:
- string
name: string
sourceCode:
branch: string
commitId: string
sshKey: string
url: string
version: string
AiInferenceModel Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
The AiInferenceModel resource accepts the following input properties:
- Endpoint string
- The AI Inference instance endpoint
- Version string
- AdditionalConfiguration string
- AiInferenceModelId string
- The GUID of the Model
- ArtifactPath string
- ComputeEnvironment AiInferenceModelComputeEnvironment
- Description string
- Description of the Compute Target
- EntryCommands List&lt;string&gt;
- Commands to execute
- Environment Dictionary&lt;string, string&gt;
- List of environment variables to set
- Labels List&lt;string&gt;
- Name string
- The name of the Model
- SourceCode AiInferenceModelSourceCode
- Endpoint string
- The AI Inference instance endpoint
- Version string
- AdditionalConfiguration string
- AiInferenceModelId string
- The GUID of the Model
- ArtifactPath string
- ComputeEnvironment AiInferenceModelComputeEnvironmentArgs
- Description string
- Description of the Compute Target
- EntryCommands []string
- Commands to execute
- Environment map[string]string
- List of environment variables to set
- Labels []string
- Name string
- The name of the Model
- SourceCode AiInferenceModelSourceCodeArgs
- endpoint String
- The AI Inference instance endpoint
- version String
- additional
Configuration String - ai
Inference StringModel Id - The GUID of the Model
- artifact
Path String - compute
Environment AiInference Model Compute Environment - description String
- Description of the Compute Target
- entry
Commands List<String> - ) Commands to execute
- environment Map<String,String>
- List of environment variables to set
- labels List<String>
- name String
- The name of the Model
- source
Code AiInference Model Source Code
- endpoint string
- The AI Inference instance endpoint
- version string
- additional
Configuration string - ai
Inference stringModel Id - The GUID of the Model
- artifact
Path string - compute
Environment AiInference Model Compute Environment - description string
- Description of the Compute Target
- entry
Commands string[] - ) Commands to execute
- environment {[key: string]: string}
- List of environment variables to set
- labels string[]
- name string
- The name of the Model
- source
Code AiInference Model Source Code
- endpoint str
- The AI Inference instance endpoint
- version str
- additional_
configuration str - ai_
inference_ strmodel_ id - The GUID of the Model
- artifact_
path str - compute_
environment AiInference Model Compute Environment Args - description str
- Description of the Compute Target
- entry_
commands Sequence[str] - ) Commands to execute
- environment Mapping[str, str]
- List of environment variables to set
- labels Sequence[str]
- name str
- The name of the Model
- source_
code AiInference Model Source Code Args
- endpoint String
- The AI Inference instance endpoint
- version String
- additional
Configuration String - ai
Inference StringModel Id - The GUID of the Model
- artifact
Path String - compute
Environment Property Map - description String
- Description of the Compute Target
- entry
Commands List<String> - ) Commands to execute
- environment Map<String>
- List of environment variables to set
- labels List<String>
- name String
- The name of the Model
- source
Code Property Map
Outputs
All input properties are implicitly available as output properties. Additionally, the AiInferenceModel resource produces the following output properties:
- created str
- The date this Model was created
- created_
by str - Who created the Model
- id str
- The provider-assigned unique ID for this managed resource.
- reference str
- The reference of this Model
Look up Existing AiInferenceModel Resource
Get an existing AiInferenceModel resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: AiInferenceModelState, opts?: CustomResourceOptions): AiInferenceModel
@staticmethod
def get(resource_name: str,
id: str,
opts: Optional[ResourceOptions] = None,
additional_configuration: Optional[str] = None,
ai_inference_model_id: Optional[str] = None,
artifact_path: Optional[str] = None,
compute_environment: Optional[AiInferenceModelComputeEnvironmentArgs] = None,
created: Optional[str] = None,
created_by: Optional[str] = None,
description: Optional[str] = None,
endpoint: Optional[str] = None,
entry_commands: Optional[Sequence[str]] = None,
environment: Optional[Mapping[str, str]] = None,
labels: Optional[Sequence[str]] = None,
name: Optional[str] = None,
reference: Optional[str] = None,
source_code: Optional[AiInferenceModelSourceCodeArgs] = None,
version: Optional[str] = None) -> AiInferenceModel
func GetAiInferenceModel(ctx *Context, name string, id IDInput, state *AiInferenceModelState, opts ...ResourceOption) (*AiInferenceModel, error)
public static AiInferenceModel Get(string name, Input<string> id, AiInferenceModelState? state, CustomResourceOptions? opts = null)
public static AiInferenceModel get(String name, Output<String> id, AiInferenceModelState state, CustomResourceOptions options)
resources:
  _:
    type: hsdp:AiInferenceModel
    get:
      id: ${id}
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- resource_name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- Additional
Configuration string - Ai
Inference stringModel Id - The GUID of the Model
- Artifact
Path string - Compute
Environment AiInference Model Compute Environment - Created string
- The date this Model was created
- Created
By string - Who created the Model
- Description string
- Description of the Compute Target
- Endpoint string
- The AI Inference instance endpoint
- Entry
Commands List<string> - ) Commands to execute
- Environment Dictionary<string, string>
- List of environment variables to set
- Labels List<string>
- Name string
- The name of the Model
- Reference string
- The reference of this Model
- Source
Code AiInference Model Source Code - Version string
- Additional
Configuration string - Ai
Inference stringModel Id - The GUID of the Model
- Artifact
Path string - Compute
Environment AiInference Model Compute Environment Args - Created string
- The date this Model was created
- Created
By string - Who created the Model
- Description string
- Description of the Compute Target
- Endpoint string
- The AI Inference instance endpoint
- Entry
Commands []string - ) Commands to execute
- Environment map[string]string
- List of environment variables to set
- Labels []string
- Name string
- The name of the Model
- Reference string
- The reference of this Model
- Source
Code AiInference Model Source Code Args - Version string
- additional
Configuration String - ai
Inference StringModel Id - The GUID of the Model
- artifact
Path String - compute
Environment AiInference Model Compute Environment - created String
- The date this Model was created
- created
By String - Who created the Model
- description String
- Description of the Compute Target
- endpoint String
- The AI Inference instance endpoint
- entry
Commands List<String> - ) Commands to execute
- environment Map<String,String>
- List of environment variables to set
- labels List<String>
- name String
- The name of the Model
- reference String
- The reference of this Model
- source
Code AiInference Model Source Code - version String
- additional
Configuration string - ai
Inference stringModel Id - The GUID of the Model
- artifact
Path string - compute
Environment AiInference Model Compute Environment - created string
- The date this Model was created
- created
By string - Who created the Model
- description string
- Description of the Compute Target
- endpoint string
- The AI Inference instance endpoint
- entry
Commands string[] - ) Commands to execute
- environment {[key: string]: string}
- List of environment variables to set
- labels string[]
- name string
- The name of the Model
- reference string
- The reference of this Model
- source
Code AiInference Model Source Code - version string
- additional_
configuration str - ai_
inference_ strmodel_ id - The GUID of the Model
- artifact_
path str - compute_
environment AiInference Model Compute Environment Args - created str
- The date this Model was created
- created_
by str - Who created the Model
- description str
- Description of the Compute Target
- endpoint str
- The AI Inference instance endpoint
- entry_
commands Sequence[str] - ) Commands to execute
- environment Mapping[str, str]
- List of environment variables to set
- labels Sequence[str]
- name str
- The name of the Model
- reference str
- The reference of this Model
- source_
code AiInference Model Source Code Args - version str
- additional
Configuration String - ai
Inference StringModel Id - The GUID of the Model
- artifact
Path String - compute
Environment Property Map - created String
- The date this Model was created
- created
By String - Who created the Model
- description String
- Description of the Compute Target
- endpoint String
- The AI Inference instance endpoint
- entry
Commands List<String> - ) Commands to execute
- environment Map<String>
- List of environment variables to set
- labels List<String>
- name String
- The name of the Model
- reference String
- The reference of this Model
- source
Code Property Map - version String
Supporting Types
AiInferenceModelComputeEnvironment, AiInferenceModelComputeEnvironmentArgs
- Identifier string
- Reference string
- The reference of this Model
- Identifier string
- Reference string
- The reference of this Model
- identifier String
- reference String
- The reference of this Model
- identifier string
- reference string
- The reference of this Model
- identifier str
- reference str
- The reference of this Model
- identifier String
- reference String
- The reference of this Model
AiInferenceModelSourceCode, AiInferenceModelSourceCodeArgs
Package Details
- Repository
- hsdp philips-software/terraform-provider-hsdp
- License
- Notes
- This Pulumi package is based on the
hsdp
Terraform Provider.