hsdp.AiInferenceComputeEnvironment
Manages HSDP AI Inference Compute Environments
Example Usage
import * as pulumi from "@pulumi/pulumi";
import * as hsdp from "@pulumi/hsdp";
// The tenant organization ID is read from stack configuration;
// the config key name is illustrative.
const config = new pulumi.Config();
const inferenceTenantOrgId = config.require("inferenceTenantOrgId");
const inferenceConfig = hsdp.getConfig({
service: "inference",
});
const inferenceAiInferenceServiceInstance = inferenceConfig.then(inferenceConfig => hsdp.getAiInferenceServiceInstance({
baseUrl: inferenceConfig.url,
organizationId: inferenceTenantOrgId,
}));
const compute = new hsdp.AiInferenceComputeEnvironment("compute", {
endpoint: inferenceAiInferenceServiceInstance.then(inferenceAiInferenceServiceInstance => inferenceAiInferenceServiceInstance.endpoint),
image: "arn:aws:ecr:us-west-2:012345678910:repository/test",
});
import pulumi
import pulumi_hsdp as hsdp
# The tenant organization ID is read from stack configuration;
# the config key name is illustrative.
config = pulumi.Config()
inference_tenant_org_id = config.require("inferenceTenantOrgId")
inference_config = hsdp.get_config(service="inference")
inference_ai_inference_service_instance = hsdp.get_ai_inference_service_instance(base_url=inference_config.url,
organization_id=inference_tenant_org_id)
compute = hsdp.AiInferenceComputeEnvironment("compute",
endpoint=inference_ai_inference_service_instance.endpoint,
image="arn:aws:ecr:us-west-2:012345678910:repository/test")
package main
import (
"github.com/pulumi/pulumi-terraform-provider/sdks/go/hsdp/hsdp"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
// The tenant organization ID is read from stack configuration;
// the config key name is illustrative.
cfg := config.New(ctx, "")
inferenceTenantOrgId := cfg.Require("inferenceTenantOrgId")
inferenceConfig, err := hsdp.GetConfig(ctx, &hsdp.GetConfigArgs{
Service: "inference",
}, nil)
if err != nil {
return err
}
inferenceAiInferenceServiceInstance, err := hsdp.GetAiInferenceServiceInstance(ctx, &hsdp.GetAiInferenceServiceInstanceArgs{
BaseUrl: inferenceConfig.Url,
OrganizationId: inferenceTenantOrgId,
}, nil)
if err != nil {
return err
}
_, err = hsdp.NewAiInferenceComputeEnvironment(ctx, "compute", &hsdp.AiInferenceComputeEnvironmentArgs{
Endpoint: pulumi.String(inferenceAiInferenceServiceInstance.Endpoint),
Image: pulumi.String("arn:aws:ecr:us-west-2:012345678910:repository/test"),
})
if err != nil {
return err
}
return nil
})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Hsdp = Pulumi.Hsdp;
return await Deployment.RunAsync(() =>
{
// The tenant organization ID is read from stack configuration;
// the config key name is illustrative.
var config = new Config();
var inferenceTenantOrgId = config.Require("inferenceTenantOrgId");
var inferenceConfig = Hsdp.GetConfig.Invoke(new()
{
Service = "inference",
});
var inferenceAiInferenceServiceInstance = Hsdp.GetAiInferenceServiceInstance.Invoke(new()
{
BaseUrl = inferenceConfig.Apply(getConfigResult => getConfigResult.Url),
OrganizationId = inferenceTenantOrgId,
});
var compute = new Hsdp.AiInferenceComputeEnvironment("compute", new()
{
Endpoint = inferenceAiInferenceServiceInstance.Apply(getAiInferenceServiceInstanceResult => getAiInferenceServiceInstanceResult.Endpoint),
Image = "arn:aws:ecr:us-west-2:012345678910:repository/test",
});
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.hsdp.HsdpFunctions;
import com.pulumi.hsdp.inputs.GetConfigArgs;
import com.pulumi.hsdp.inputs.GetAiInferenceServiceInstanceArgs;
import com.pulumi.hsdp.AiInferenceComputeEnvironment;
import com.pulumi.hsdp.AiInferenceComputeEnvironmentArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
// The tenant organization ID is read from stack configuration;
// the config key name is illustrative.
final var inferenceTenantOrgId = ctx.config().require("inferenceTenantOrgId");
final var inferenceConfig = HsdpFunctions.getConfig(GetConfigArgs.builder()
.service("inference")
.build());
final var inferenceAiInferenceServiceInstance = HsdpFunctions.getAiInferenceServiceInstance(GetAiInferenceServiceInstanceArgs.builder()
.baseUrl(inferenceConfig.applyValue(getConfigResult -> getConfigResult.url()))
.organizationId(inferenceTenantOrgId)
.build());
var compute = new AiInferenceComputeEnvironment("compute", AiInferenceComputeEnvironmentArgs.builder()
.endpoint(inferenceAiInferenceServiceInstance.applyValue(getAiInferenceServiceInstanceResult -> getAiInferenceServiceInstanceResult.endpoint()))
.image("arn:aws:ecr:us-west-2:012345678910:repository/test")
.build());
}
}
# The tenant organization ID is read from stack configuration;
# the config key name is illustrative.
config:
  inferenceTenantOrgId:
    type: string
resources:
  compute:
    type: hsdp:AiInferenceComputeEnvironment
    properties:
      endpoint: ${inferenceAiInferenceServiceInstance.endpoint}
      image: arn:aws:ecr:us-west-2:012345678910:repository/test
variables:
  inferenceConfig:
    fn::invoke:
      function: hsdp:getConfig
      arguments:
        service: inference
  inferenceAiInferenceServiceInstance:
    fn::invoke:
      function: hsdp:getAiInferenceServiceInstance
      arguments:
        baseUrl: ${inferenceConfig.url}
        organizationId: ${inferenceTenantOrgId}
The following arguments are supported:
- endpoint - (Required) The AI Inference instance endpoint
- name - (Required) The name of the Compute Environment
- image - (Required) The image to use for the Compute Environment
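As a minimal TypeScript sketch, here is a declaration that sets all three required arguments explicitly; the endpoint URL and resource names are placeholder values, and in practice the endpoint would come from the hsdp.getAiInferenceServiceInstance data source used in the Example Usage above.
import * as hsdp from "@pulumi/hsdp";

// Minimal sketch: endpoint, name, and image set explicitly.
// The endpoint URL and the names are placeholder values.
const minimal = new hsdp.AiInferenceComputeEnvironment("minimal", {
    endpoint: "https://inference.example.com",
    name: "my-compute-environment",
    image: "arn:aws:ecr:us-west-2:012345678910:repository/test",
});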
Create AiInferenceComputeEnvironment Resource
Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.
Constructor syntax
new AiInferenceComputeEnvironment(name: string, args: AiInferenceComputeEnvironmentArgs, opts?: CustomResourceOptions);
@overload
def AiInferenceComputeEnvironment(resource_name: str,
args: AiInferenceComputeEnvironmentArgs,
opts: Optional[ResourceOptions] = None)
@overload
def AiInferenceComputeEnvironment(resource_name: str,
opts: Optional[ResourceOptions] = None,
endpoint: Optional[str] = None,
image: Optional[str] = None,
ai_inference_compute_environment_id: Optional[str] = None,
description: Optional[str] = None,
name: Optional[str] = None)
func NewAiInferenceComputeEnvironment(ctx *Context, name string, args AiInferenceComputeEnvironmentArgs, opts ...ResourceOption) (*AiInferenceComputeEnvironment, error)
public AiInferenceComputeEnvironment(string name, AiInferenceComputeEnvironmentArgs args, CustomResourceOptions? opts = null)
public AiInferenceComputeEnvironment(String name, AiInferenceComputeEnvironmentArgs args)
public AiInferenceComputeEnvironment(String name, AiInferenceComputeEnvironmentArgs args, CustomResourceOptions options)
type: hsdp:AiInferenceComputeEnvironment
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
Parameters
- name string
- The unique name of the resource.
- args AiInferenceComputeEnvironmentArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- resource_name str
- The unique name of the resource.
- args AiInferenceComputeEnvironmentArgs
- The arguments to resource properties.
- opts ResourceOptions
- Bag of options to control resource's behavior.
- ctx Context
- Context object for the current deployment.
- name string
- The unique name of the resource.
- args AiInferenceComputeEnvironmentArgs
- The arguments to resource properties.
- opts ResourceOption
- Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args AiInferenceComputeEnvironmentArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- name String
- The unique name of the resource.
- args AiInferenceComputeEnvironmentArgs
- The arguments to resource properties.
- options CustomResourceOptions
- Bag of options to control resource's behavior.
Constructor example
The following reference example uses placeholder values for all input properties.
var aiInferenceComputeEnvironmentResource = new Hsdp.AiInferenceComputeEnvironment("aiInferenceComputeEnvironmentResource", new()
{
Endpoint = "string",
Image = "string",
AiInferenceComputeEnvironmentId = "string",
Description = "string",
Name = "string",
});
example, err := hsdp.NewAiInferenceComputeEnvironment(ctx, "aiInferenceComputeEnvironmentResource", &hsdp.AiInferenceComputeEnvironmentArgs{
Endpoint: pulumi.String("string"),
Image: pulumi.String("string"),
AiInferenceComputeEnvironmentId: pulumi.String("string"),
Description: pulumi.String("string"),
Name: pulumi.String("string"),
})
var aiInferenceComputeEnvironmentResource = new AiInferenceComputeEnvironment("aiInferenceComputeEnvironmentResource", AiInferenceComputeEnvironmentArgs.builder()
.endpoint("string")
.image("string")
.aiInferenceComputeEnvironmentId("string")
.description("string")
.name("string")
.build());
ai_inference_compute_environment_resource = hsdp.AiInferenceComputeEnvironment("aiInferenceComputeEnvironmentResource",
endpoint="string",
image="string",
ai_inference_compute_environment_id="string",
description="string",
name="string")
const aiInferenceComputeEnvironmentResource = new hsdp.AiInferenceComputeEnvironment("aiInferenceComputeEnvironmentResource", {
endpoint: "string",
image: "string",
aiInferenceComputeEnvironmentId: "string",
description: "string",
name: "string",
});
type: hsdp:AiInferenceComputeEnvironment
properties:
  aiInferenceComputeEnvironmentId: string
  description: string
  endpoint: string
  image: string
  name: string
AiInferenceComputeEnvironment Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
The AiInferenceComputeEnvironment resource accepts the following input properties:
- Endpoint string
- Image string
- AiInferenceComputeEnvironmentId string
- The GUID of the Compute Environment
- Description string
- Name string
- Endpoint string
- Image string
- AiInferenceComputeEnvironmentId string
- The GUID of the Compute Environment
- Description string
- Name string
- endpoint String
- image String
- aiInferenceComputeEnvironmentId String
- The GUID of the Compute Environment
- description String
- name String
- endpoint string
- image string
- aiInferenceComputeEnvironmentId string
- The GUID of the Compute Environment
- description string
- name string
- endpoint str
- image str
- ai_inference_compute_environment_id str
- The GUID of the Compute Environment
- description str
- name str
- endpoint String
- image String
- aiInferenceComputeEnvironmentId String
- The GUID of the Compute Environment
- description String
- name String
Outputs
All input properties are implicitly available as output properties. Additionally, the AiInferenceComputeEnvironment resource produces the following output properties:
- created str
- The date this Compute Environment was created
- created_by str
- Who created the environment
- id str
- The provider-assigned unique ID for this managed resource.
- is_factory bool
- Whether this Compute Environment is a factory one
- reference str
- The reference of this Compute Environment
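For instance, these outputs can be exported as stack outputs. The TypeScript sketch below assumes a placeholder endpoint; any of the output properties listed above can be exported the same way.
import * as hsdp from "@pulumi/hsdp";

// Sketch only: the endpoint value is a placeholder.
const compute = new hsdp.AiInferenceComputeEnvironment("compute", {
    endpoint: "https://inference.example.com",
    image: "arn:aws:ecr:us-west-2:012345678910:repository/test",
});

// Expose selected output properties as stack outputs.
export const computeCreated = compute.created;     // creation date
export const computeCreatedBy = compute.createdBy; // who created the environment
export const computeReference = compute.reference; // reference of this Compute Environment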
Look up Existing AiInferenceComputeEnvironment Resource
Get an existing AiInferenceComputeEnvironment resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: AiInferenceComputeEnvironmentState, opts?: CustomResourceOptions): AiInferenceComputeEnvironment
@staticmethod
def get(resource_name: str,
id: str,
opts: Optional[ResourceOptions] = None,
ai_inference_compute_environment_id: Optional[str] = None,
created: Optional[str] = None,
created_by: Optional[str] = None,
description: Optional[str] = None,
endpoint: Optional[str] = None,
image: Optional[str] = None,
is_factory: Optional[bool] = None,
name: Optional[str] = None,
reference: Optional[str] = None) -> AiInferenceComputeEnvironment
func GetAiInferenceComputeEnvironment(ctx *Context, name string, id IDInput, state *AiInferenceComputeEnvironmentState, opts ...ResourceOption) (*AiInferenceComputeEnvironment, error)
public static AiInferenceComputeEnvironment Get(string name, Input<string> id, AiInferenceComputeEnvironmentState? state, CustomResourceOptions? opts = null)
public static AiInferenceComputeEnvironment get(String name, Output<String> id, AiInferenceComputeEnvironmentState state, CustomResourceOptions options)
resources:
  _:
    type: hsdp:AiInferenceComputeEnvironment
    get:
      id: ${id}
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- resource_name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
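As a brief TypeScript sketch, an existing Compute Environment can be looked up by its GUID and its properties read back; the ID used below is a placeholder.
import * as hsdp from "@pulumi/hsdp";

// Illustrative lookup: "a-guid" is a placeholder for a real Compute Environment GUID.
const existing = hsdp.AiInferenceComputeEnvironment.get("existing-compute", "a-guid");

// The looked-up resource exposes the same output properties as a newly created one.
export const existingImage = existing.image;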
- AiInferenceComputeEnvironmentId string
- The GUID of the Compute Environment
- Created string
- The date this Compute Environment was created
- CreatedBy string
- Who created the environment
- Description string
- Endpoint string
- Image string
- IsFactory bool
- Whether this Compute Environment is a factory one
- Name string
- Reference string
- The reference of this Compute Environment
- AiInferenceComputeEnvironmentId string
- The GUID of the Compute Environment
- Created string
- The date this Compute Environment was created
- CreatedBy string
- Who created the environment
- Description string
- Endpoint string
- Image string
- IsFactory bool
- Whether this Compute Environment is a factory one
- Name string
- Reference string
- The reference of this Compute Environment
- aiInferenceComputeEnvironmentId String
- The GUID of the Compute Environment
- created String
- The date this Compute Environment was created
- createdBy String
- Who created the environment
- description String
- endpoint String
- image String
- isFactory Boolean
- Whether this Compute Environment is a factory one
- name String
- reference String
- The reference of this Compute Environment
- aiInferenceComputeEnvironmentId string
- The GUID of the Compute Environment
- created string
- The date this Compute Environment was created
- createdBy string
- Who created the environment
- description string
- endpoint string
- image string
- isFactory boolean
- Whether this Compute Environment is a factory one
- name string
- reference string
- The reference of this Compute Environment
- ai_inference_compute_environment_id str
- The GUID of the Compute Environment
- created str
- The date this Compute Environment was created
- created_by str
- Who created the environment
- description str
- endpoint str
- image str
- is_factory bool
- Whether this Compute Environment is a factory one
- name str
- reference str
- The reference of this Compute Environment
- aiInferenceComputeEnvironmentId String
- The GUID of the Compute Environment
- created String
- The date this Compute Environment was created
- createdBy String
- Who created the environment
- description String
- endpoint String
- image String
- isFactory Boolean
- Whether this Compute Environment is a factory one
- name String
- reference String
- The reference of this Compute Environment
Import
An existing Compute Environment can be imported using `hsdp_ai_inference_compute_environment`, e.g.
$ pulumi import hsdp:index/aiInferenceComputeEnvironment:AiInferenceComputeEnvironment env a-guid
To learn more about importing existing cloud resources, see Importing resources.
Package Details
- Repository
- hsdp philips-software/terraform-provider-hsdp
- License
- Notes
- This Pulumi package is based on the hsdp Terraform Provider.