AWS v7.4.0 published on Wednesday, Aug 13, 2025 by Pulumi

aws.bedrock.getInferenceProfiles


    Data source for retrieving AWS Bedrock Inference Profiles.

    Example Usage

    Basic Usage

    TypeScript

    import * as pulumi from "@pulumi/pulumi";
    import * as aws from "@pulumi/aws";
    
    const test = aws.bedrock.getInferenceProfiles({});
    
    Python

    import pulumi
    import pulumi_aws as aws
    
    test = aws.bedrock.get_inference_profiles()
    
    Go

    package main
    
    import (
    	"github.com/pulumi/pulumi-aws/sdk/v7/go/aws/bedrock"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		_, err := bedrock.GetInferenceProfiles(ctx, &bedrock.GetInferenceProfilesArgs{}, nil)
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    C#

    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Aws = Pulumi.Aws;
    
    return await Deployment.RunAsync(() => 
    {
        var test = Aws.Bedrock.GetInferenceProfiles.Invoke();
    
    });
    
    Java

    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.aws.bedrock.BedrockFunctions;
    import com.pulumi.aws.bedrock.inputs.GetInferenceProfilesArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            final var test = BedrockFunctions.getInferenceProfiles(GetInferenceProfilesArgs.builder()
                .build());
    
        }
    }
    
    YAML

    variables:
      test:
        fn::invoke:
          function: aws:bedrock:getInferenceProfiles
          arguments: {}
    

    Filter by Type

    TypeScript

    import * as pulumi from "@pulumi/pulumi";
    import * as aws from "@pulumi/aws";
    
    const test = aws.bedrock.getInferenceProfiles({
        type: "APPLICATION",
    });
    
    Python

    import pulumi
    import pulumi_aws as aws
    
    test = aws.bedrock.get_inference_profiles(type="APPLICATION")
    
    Go

    package main
    
    import (
    	"github.com/pulumi/pulumi-aws/sdk/v7/go/aws/bedrock"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		_, err := bedrock.GetInferenceProfiles(ctx, &bedrock.GetInferenceProfilesArgs{
    			Type: pulumi.StringRef("APPLICATION"),
    		}, nil)
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    C#

    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Aws = Pulumi.Aws;
    
    return await Deployment.RunAsync(() => 
    {
        var test = Aws.Bedrock.GetInferenceProfiles.Invoke(new()
        {
            Type = "APPLICATION",
        });
    
    });
    
    Java

    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.aws.bedrock.BedrockFunctions;
    import com.pulumi.aws.bedrock.inputs.GetInferenceProfilesArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            final var test = BedrockFunctions.getInferenceProfiles(GetInferenceProfilesArgs.builder()
                .type("APPLICATION")
                .build());
    
        }
    }
    
    YAML

    variables:
      test:
        fn::invoke:
          function: aws:bedrock:getInferenceProfiles
          arguments:
            type: APPLICATION
    

    Using getInferenceProfiles

    Two invocation forms are available. The direct form accepts plain arguments and either blocks until the result value is available, or returns a Promise-wrapped result. The output form accepts Input-wrapped arguments and returns an Output-wrapped result.

    TypeScript

    function getInferenceProfiles(args: GetInferenceProfilesArgs, opts?: InvokeOptions): Promise<GetInferenceProfilesResult>
    function getInferenceProfilesOutput(args: GetInferenceProfilesOutputArgs, opts?: InvokeOptions): Output<GetInferenceProfilesResult>

    Python

    def get_inference_profiles(region: Optional[str] = None,
                               type: Optional[str] = None,
                               opts: Optional[InvokeOptions] = None) -> GetInferenceProfilesResult
    def get_inference_profiles_output(region: Optional[pulumi.Input[str]] = None,
                                      type: Optional[pulumi.Input[str]] = None,
                                      opts: Optional[InvokeOptions] = None) -> Output[GetInferenceProfilesResult]

    Go

    func GetInferenceProfiles(ctx *Context, args *GetInferenceProfilesArgs, opts ...InvokeOption) (*GetInferenceProfilesResult, error)
    func GetInferenceProfilesOutput(ctx *Context, args *GetInferenceProfilesOutputArgs, opts ...InvokeOption) GetInferenceProfilesResultOutput

    > Note: This function is named GetInferenceProfiles in the Go SDK.

    C#

    public static class GetInferenceProfiles 
    {
        public static Task<GetInferenceProfilesResult> InvokeAsync(GetInferenceProfilesArgs args, InvokeOptions? opts = null)
        public static Output<GetInferenceProfilesResult> Invoke(GetInferenceProfilesInvokeArgs args, InvokeOptions? opts = null)
    }

    Java

    public static CompletableFuture<GetInferenceProfilesResult> getInferenceProfiles(GetInferenceProfilesArgs args, InvokeOptions options)
    public static Output<GetInferenceProfilesResult> getInferenceProfiles(GetInferenceProfilesArgs args, InvokeOptions options)

    YAML

    fn::invoke:
      function: aws:bedrock/getInferenceProfiles:getInferenceProfiles
      arguments:
        # arguments dictionary
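
    For example, a minimal TypeScript sketch contrasting the two forms (the profileType config key is illustrative, not part of the API):

    import * as pulumi from "@pulumi/pulumi";
    import * as aws from "@pulumi/aws";

    // Direct form: plain arguments, Promise-wrapped result.
    const direct = aws.bedrock.getInferenceProfiles({ type: "SYSTEM_DEFINED" });
    export const directCount = direct.then(r => r.inferenceProfileSummaries.length);

    // Output form: accepts Input-wrapped arguments and returns an Output,
    // so it composes with values that are unknown until deployment.
    const config = new pulumi.Config();
    const viaOutput = aws.bedrock.getInferenceProfilesOutput({
        type: config.require("profileType"), // illustrative config key
    });
    export const outputCount = viaOutput.inferenceProfileSummaries.apply(s => s.length);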

    The following arguments are supported. Both are optional. Names here and in the listings below use TypeScript casing; the other SDKs follow their own conventions (for example, Region and Type in Go and C#, region and type in Python).

    region string
    Region where this resource will be managed. Defaults to the Region set in the provider configuration.
    type string
    Filters for inference profiles that match the type you specify. Valid values are: SYSTEM_DEFINED, APPLICATION.
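
    As a sketch, the region argument can scope a single lookup to a different region than the provider default (us-west-2 here is just an example value):

    import * as aws from "@pulumi/aws";

    // Look up application-defined inference profiles in us-west-2,
    // regardless of the region configured on the provider.
    const westProfiles = aws.bedrock.getInferenceProfiles({
        region: "us-west-2",
        type: "APPLICATION",
    });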

    getInferenceProfiles Result

    The following output properties are available:

    id string
    The provider-assigned unique ID for this managed resource.
    inferenceProfileSummaries GetInferenceProfilesInferenceProfileSummary[]
    List of inference profile summary objects. See the GetInferenceProfilesInferenceProfileSummary type under Supporting Types.
    region string
    type string
    Type of the inference profile. SYSTEM_DEFINED means that the inference profile is defined by Amazon Bedrock. APPLICATION means the inference profile was created by a user.
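
    As an illustration, a TypeScript sketch that exports the name and ARN of each returned profile (property names follow the summary type documented under Supporting Types):

    import * as aws from "@pulumi/aws";

    const profiles = aws.bedrock.getInferenceProfilesOutput({});

    // Map each summary to a small name/ARN record.
    export const profileIndex = profiles.inferenceProfileSummaries.apply(summaries =>
        summaries.map(s => ({
            name: s.inferenceProfileName,
            arn: s.inferenceProfileArn,
        })),
    );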

    Supporting Types

    GetInferenceProfilesInferenceProfileSummary

    createdAt string
    Time at which the inference profile was created.
    description string
    Description of the inference profile.
    inferenceProfileArn string
    Amazon Resource Name (ARN) of the inference profile.
    inferenceProfileId string
    Unique identifier of the inference profile.
    inferenceProfileName string
    Name of the inference profile.
    models GetInferenceProfilesInferenceProfileSummaryModel[]
    List of information about each model in the inference profile. See the GetInferenceProfilesInferenceProfileSummaryModel type below.
    status string
    Status of the inference profile. ACTIVE means that the inference profile is available to use.
    type string
    Type of the inference profile. SYSTEM_DEFINED means that the inference profile is defined by Amazon Bedrock. APPLICATION means the inference profile was created by a user.
    updatedAt string
    Time at which the inference profile was last updated.
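
    Since status indicates availability (ACTIVE means the profile can be used), a short sketch that keeps only active profiles:

    import * as aws from "@pulumi/aws";

    const profiles = aws.bedrock.getInferenceProfilesOutput({});

    // Keep only the profiles whose status is ACTIVE, i.e. available to use.
    export const activeProfileNames = profiles.inferenceProfileSummaries.apply(summaries =>
        summaries.filter(s => s.status === "ACTIVE").map(s => s.inferenceProfileName),
    );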

    GetInferenceProfilesInferenceProfileSummaryModel

    modelArn string
    Amazon Resource Name (ARN) of the model.
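
    For example, a minimal TypeScript sketch that flattens the nested models lists into a single array of model ARNs:

    import * as aws from "@pulumi/aws";

    const profiles = aws.bedrock.getInferenceProfilesOutput({});

    // Collect every model ARN across all inference profiles.
    export const modelArns = profiles.inferenceProfileSummaries.apply(summaries =>
        summaries.flatMap(s => s.models.map(m => m.modelArn)),
    );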

    Package Details

    Repository: AWS Classic pulumi/pulumi-aws
    License: Apache-2.0
    Notes: This Pulumi package is based on the aws Terraform Provider.