1. Packages
  2. Packages
  3. DigitalOcean Provider
  4. API Docs
  5. getDedicatedInferenceSizes
Viewing docs for DigitalOcean v4.65.0
published on Wednesday, Apr 29, 2026 by Pulumi
digitalocean logo
Viewing docs for DigitalOcean v4.65.0
published on Wednesday, Apr 29, 2026 by Pulumi

    Returns the available GPU sizes and their configurations for dedicated inference endpoints, including pricing, hardware specifications, and region availability.

    Example Usage

    import * as pulumi from "@pulumi/pulumi";
    import * as digitalocean from "@pulumi/digitalocean";
    
    const available = digitalocean.getDedicatedInferenceSizes({});
    export const enabledRegions = available.then(available => available.enabledRegions);
    export const sizes = available.then(available => available.sizes);
    
    import pulumi
    import pulumi_digitalocean as digitalocean
    
    available = digitalocean.get_dedicated_inference_sizes()
    pulumi.export("enabledRegions", available.enabled_regions)
    pulumi.export("sizes", available.sizes)
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-digitalocean/sdk/v4/go/digitalocean"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		available, err := digitalocean.GetDedicatedInferenceSizes(ctx, nil)
    		if err != nil {
    			return err
    		}
    		ctx.Export("enabledRegions", available.EnabledRegions)
    		ctx.Export("sizes", available.Sizes)
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using DigitalOcean = Pulumi.DigitalOcean;
    
    return await Deployment.RunAsync(() => 
    {
        var available = DigitalOcean.Index.GetDedicatedInferenceSizes.Invoke();
    
        return new Dictionary<string, object?>
        {
            ["enabledRegions"] = available.Apply(getDedicatedInferenceSizesResult => getDedicatedInferenceSizesResult.EnabledRegions),
            ["sizes"] = available.Apply(getDedicatedInferenceSizesResult => getDedicatedInferenceSizesResult.Sizes),
        };
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.digitalocean.DigitaloceanFunctions;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            final var available = DigitaloceanFunctions.getDedicatedInferenceSizes();
    
            ctx.export("enabledRegions", available.enabledRegions());
            ctx.export("sizes", available.sizes());
        }
    }
    
    variables:
      available:
        fn::invoke:
          function: digitalocean:getDedicatedInferenceSizes
          arguments: {}
    outputs:
      enabledRegions: ${available.enabledRegions}
      sizes: ${available.sizes}
    

    Using getDedicatedInferenceSizes

    Two invocation forms are available. The direct form accepts plain arguments and either blocks until the result value is available, or returns a Promise-wrapped result. The output form accepts Input-wrapped arguments and returns an Output-wrapped result.

    function getDedicatedInferenceSizes(opts?: InvokeOptions): Promise<GetDedicatedInferenceSizesResult>
    function getDedicatedInferenceSizesOutput(opts?: InvokeOptions): Output<GetDedicatedInferenceSizesResult>
    def get_dedicated_inference_sizes(opts: Optional[InvokeOptions] = None) -> GetDedicatedInferenceSizesResult
    def get_dedicated_inference_sizes_output(opts: Optional[InvokeOptions] = None) -> Output[GetDedicatedInferenceSizesResult]
    func GetDedicatedInferenceSizes(ctx *Context, opts ...InvokeOption) (*GetDedicatedInferenceSizesResult, error)
    func GetDedicatedInferenceSizesOutput(ctx *Context, opts ...InvokeOption) GetDedicatedInferenceSizesResultOutput

    > Note: This function is named GetDedicatedInferenceSizes in the Go SDK.

    public static class GetDedicatedInferenceSizes 
    {
        public static Task<GetDedicatedInferenceSizesResult> InvokeAsync(InvokeOptions? opts = null)
        public static Output<GetDedicatedInferenceSizesResult> Invoke(InvokeOptions? opts = null)
    }
    public static CompletableFuture<GetDedicatedInferenceSizesResult> getDedicatedInferenceSizes(InvokeOptions options)
    public static Output<GetDedicatedInferenceSizesResult> getDedicatedInferenceSizes(InvokeOptions options)
    
    fn::invoke:
      function: digitalocean:index/getDedicatedInferenceSizes:getDedicatedInferenceSizes
      arguments:
        # arguments dictionary

    getDedicatedInferenceSizes Result

    The following output properties are available:

    EnabledRegions List<string>
    The list of region slugs where dedicated inference endpoints can be deployed.
    Id string
    The provider-assigned unique ID for this managed resource.
    Sizes List<Pulumi.DigitalOcean.Outputs.GetDedicatedInferenceSizesSize>
    The list of available GPU sizes. Each element contains:
    EnabledRegions []string
    The list of region slugs where dedicated inference endpoints can be deployed.
    Id string
    The provider-assigned unique ID for this managed resource.
    Sizes []GetDedicatedInferenceSizesSize
    The list of available GPU sizes. Each element contains:
    enabledRegions List<String>
    The list of region slugs where dedicated inference endpoints can be deployed.
    id String
    The provider-assigned unique ID for this managed resource.
    sizes List<GetDedicatedInferenceSizesSize>
    The list of available GPU sizes. Each element contains:
    enabledRegions string[]
    The list of region slugs where dedicated inference endpoints can be deployed.
    id string
    The provider-assigned unique ID for this managed resource.
    sizes GetDedicatedInferenceSizesSize[]
    The list of available GPU sizes. Each element contains:
    enabled_regions Sequence[str]
    The list of region slugs where dedicated inference endpoints can be deployed.
    id str
    The provider-assigned unique ID for this managed resource.
    sizes Sequence[GetDedicatedInferenceSizesSize]
    The list of available GPU sizes. Each element contains:
    enabledRegions List<String>
    The list of region slugs where dedicated inference endpoints can be deployed.
    id String
    The provider-assigned unique ID for this managed resource.
    sizes List<Property Map>
    The list of available GPU sizes. Each element contains:

    Supporting Types

    GetDedicatedInferenceSizesSize

    Cpu int
    The number of vCPUs.
    Currency string
    The currency for the price.
    Disks List<Pulumi.DigitalOcean.Outputs.GetDedicatedInferenceSizesSizeDisk>
    The disks attached to this size. Each element contains:
    GpuSlug string
    The slug identifier for this GPU size.
    Gpus List<Pulumi.DigitalOcean.Outputs.GetDedicatedInferenceSizesSizeGpus>
    GPU hardware details. Each element contains:
    Memory int
    The amount of memory in MiB.
    PricePerHour string
    The hourly price for this GPU size.
    Regions List<string>
    The regions where this GPU size is available.
    SizeCategories List<Pulumi.DigitalOcean.Outputs.GetDedicatedInferenceSizesSizeSizeCategory>
    The category this size belongs to. Each element contains:
    Cpu int
    The number of vCPUs.
    Currency string
    The currency for the price.
    Disks []GetDedicatedInferenceSizesSizeDisk
    The disks attached to this size. Each element contains:
    GpuSlug string
    The slug identifier for this GPU size.
    Gpus []GetDedicatedInferenceSizesSizeGpus
    GPU hardware details. Each element contains:
    Memory int
    The amount of memory in MiB.
    PricePerHour string
    The hourly price for this GPU size.
    Regions []string
    The regions where this GPU size is available.
    SizeCategories []GetDedicatedInferenceSizesSizeSizeCategory
    The category this size belongs to. Each element contains:
    cpu Integer
    The number of vCPUs.
    currency String
    The currency for the price.
    disks List<GetDedicatedInferenceSizesSizeDisk>
    The disks attached to this size. Each element contains:
    gpuSlug String
    The slug identifier for this GPU size.
    gpus List<GetDedicatedInferenceSizesSizeGpus>
    GPU hardware details. Each element contains:
    memory Integer
    The amount of memory in MiB.
    pricePerHour String
    The hourly price for this GPU size.
    regions List<String>
    The regions where this GPU size is available.
    sizeCategories List<GetDedicatedInferenceSizesSizeSizeCategory>
    The category this size belongs to. Each element contains:
    cpu number
    The number of vCPUs.
    currency string
    The currency for the price.
    disks GetDedicatedInferenceSizesSizeDisk[]
    The disks attached to this size. Each element contains:
    gpuSlug string
    The slug identifier for this GPU size.
    gpus GetDedicatedInferenceSizesSizeGpus[]
    GPU hardware details. Each element contains:
    memory number
    The amount of memory in MiB.
    pricePerHour string
    The hourly price for this GPU size.
    regions string[]
    The regions where this GPU size is available.
    sizeCategories GetDedicatedInferenceSizesSizeSizeCategory[]
    The category this size belongs to. Each element contains:
    cpu int
    The number of vCPUs.
    currency str
    The currency for the price.
    disks Sequence[GetDedicatedInferenceSizesSizeDisk]
    The disks attached to this size. Each element contains:
    gpu_slug str
    The slug identifier for this GPU size.
    gpus Sequence[GetDedicatedInferenceSizesSizeGpus]
    GPU hardware details. Each element contains:
    memory int
    The amount of memory in MiB.
    price_per_hour str
    The hourly price for this GPU size.
    regions Sequence[str]
    The regions where this GPU size is available.
    size_categories Sequence[GetDedicatedInferenceSizesSizeSizeCategory]
    The category this size belongs to. Each element contains:
    cpu Number
    The number of vCPUs.
    currency String
    The currency for the price.
    disks List<Property Map>
    The disks attached to this size. Each element contains:
    gpuSlug String
    The slug identifier for this GPU size.
    gpus List<Property Map>
    GPU hardware details. Each element contains:
    memory Number
    The amount of memory in MiB.
    pricePerHour String
    The hourly price for this GPU size.
    regions List<String>
    The regions where this GPU size is available.
    sizeCategories List<Property Map>
    The category this size belongs to. Each element contains:

    GetDedicatedInferenceSizesSizeDisk

    SizeGb int
    The disk size in GiB.
    Type string
    The disk type.
    SizeGb int
    The disk size in GiB.
    Type string
    The disk type.
    sizeGb Integer
    The disk size in GiB.
    type String
    The disk type.
    sizeGb number
    The disk size in GiB.
    type string
    The disk type.
    size_gb int
    The disk size in GiB.
    type str
    The disk type.
    sizeGb Number
    The disk size in GiB.
    type String
    The disk type.

    GetDedicatedInferenceSizesSizeGpus

    Count int
    The number of GPUs.
    Slug string
    The GPU model slug.
    VramGb int
    The VRAM per GPU in GiB.
    Count int
    The number of GPUs.
    Slug string
    The GPU model slug.
    VramGb int
    The VRAM per GPU in GiB.
    count Integer
    The number of GPUs.
    slug String
    The GPU model slug.
    vramGb Integer
    The VRAM per GPU in GiB.
    count number
    The number of GPUs.
    slug string
    The GPU model slug.
    vramGb number
    The VRAM per GPU in GiB.
    count int
    The number of GPUs.
    slug str
    The GPU model slug.
    vram_gb int
    The VRAM per GPU in GiB.
    count Number
    The number of GPUs.
    slug String
    The GPU model slug.
    vramGb Number
    The VRAM per GPU in GiB.

    GetDedicatedInferenceSizesSizeSizeCategory

    FleetName string
    The fleet name associated with the size category.
    Name string
    The display name of the size category.
    FleetName string
    The fleet name associated with the size category.
    Name string
    The display name of the size category.
    fleetName String
    The fleet name associated with the size category.
    name String
    The display name of the size category.
    fleetName string
    The fleet name associated with the size category.
    name string
    The display name of the size category.
    fleet_name str
    The fleet name associated with the size category.
    name str
    The display name of the size category.
    fleetName String
    The fleet name associated with the size category.
    name String
    The display name of the size category.

    Package Details

    Repository
    DigitalOcean pulumi/pulumi-digitalocean
    License
    Apache-2.0
    Notes
    This Pulumi package is based on the digitalocean Terraform Provider.
    digitalocean logo
    Viewing docs for DigitalOcean v4.65.0
    published on Wednesday, Apr 29, 2026 by Pulumi
      Try Pulumi Cloud free. Your team will thank you.