Viewing docs for DigitalOcean v4.65.0
published on Wednesday, Apr 29, 2026 by Pulumi
Returns the available GPU sizes and their configurations for dedicated inference endpoints, including pricing, hardware specifications, and region availability.
Example Usage
import * as pulumi from "@pulumi/pulumi";
import * as digitalocean from "@pulumi/digitalocean";

// Look up the GPU sizes offered for dedicated inference endpoints.
const available = digitalocean.getDedicatedInferenceSizes({});

// Export the deployable regions and the size catalogue as stack outputs.
export const enabledRegions = available.then(result => result.enabledRegions);
export const sizes = available.then(result => result.sizes);
import pulumi
import pulumi_digitalocean as digitalocean

# Look up the GPU sizes offered for dedicated inference endpoints.
size_info = digitalocean.get_dedicated_inference_sizes()

# Export the deployable regions and the size catalogue as stack outputs.
pulumi.export("enabledRegions", size_info.enabled_regions)
pulumi.export("sizes", size_info.sizes)
package main
import (
"github.com/pulumi/pulumi-digitalocean/sdk/v4/go/digitalocean"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
available, err := digitalocean.GetDedicatedInferenceSizes(ctx, map[string]interface{}{}, nil)
if err != nil {
return err
}
ctx.Export("enabledRegions", available.EnabledRegions)
ctx.Export("sizes", available.Sizes)
return nil
})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using DigitalOcean = Pulumi.DigitalOcean;

return await Deployment.RunAsync(() =>
{
    // Look up the GPU sizes offered for dedicated inference endpoints.
    var sizeInfo = DigitalOcean.Index.GetDedicatedInferenceSizes.Invoke();

    // Export the deployable regions and the size catalogue as stack outputs.
    return new Dictionary<string, object?>
    {
        ["enabledRegions"] = sizeInfo.Apply(result => result.EnabledRegions),
        ["sizes"] = sizeInfo.Apply(result => result.Sizes),
    };
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.digitalocean.DigitaloceanFunctions;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
final var available = DigitaloceanFunctions.getDedicatedInferenceSizes(%!v(PANIC=Format method: runtime error: invalid memory address or nil pointer dereference);
ctx.export("enabledRegions", available.enabledRegions());
ctx.export("sizes", available.sizes());
}
}
variables:
  available:
    # Query the GPU sizes available for dedicated inference endpoints.
    fn::invoke:
      function: digitalocean:getDedicatedInferenceSizes
      arguments: {}
outputs:
  # Regions where dedicated inference endpoints can be deployed.
  enabledRegions: ${available.enabledRegions}
  # The list of available GPU size configurations.
  sizes: ${available.sizes}
Using getDedicatedInferenceSizes
Two invocation forms are available. The direct form accepts plain arguments and either blocks until the result value is available, or returns a Promise-wrapped result. The output form accepts Input-wrapped arguments and returns an Output-wrapped result.
function getDedicatedInferenceSizes(opts?: InvokeOptions): Promise<GetDedicatedInferenceSizesResult>
function getDedicatedInferenceSizesOutput(opts?: InvokeOptions): Output<GetDedicatedInferenceSizesResult>
def get_dedicated_inference_sizes(opts: Optional[InvokeOptions] = None) -> GetDedicatedInferenceSizesResult
def get_dedicated_inference_sizes_output(opts: Optional[InvokeOptions] = None) -> Output[GetDedicatedInferenceSizesResult]
func GetDedicatedInferenceSizes(ctx *Context, opts ...InvokeOption) (*GetDedicatedInferenceSizesResult, error)
func GetDedicatedInferenceSizesOutput(ctx *Context, opts ...InvokeOption) GetDedicatedInferenceSizesResultOutput
> Note: This function is named GetDedicatedInferenceSizes in the Go SDK.
public static class GetDedicatedInferenceSizes
{
public static Task<GetDedicatedInferenceSizesResult> InvokeAsync(InvokeOptions? opts = null)
public static Output<GetDedicatedInferenceSizesResult> Invoke(InvokeOptions? opts = null)
}
public static CompletableFuture<GetDedicatedInferenceSizesResult> getDedicatedInferenceSizes(InvokeOptions options)
public static Output<GetDedicatedInferenceSizesResult> getDedicatedInferenceSizes(InvokeOptions options)
fn::invoke:
function: digitalocean:index/getDedicatedInferenceSizes:getDedicatedInferenceSizes
arguments:
# arguments dictionary
getDedicatedInferenceSizes Result
The following output properties are available:
- Enabled
Regions List<string> - The list of region slugs where dedicated inference endpoints can be deployed.
- Id string
- The provider-assigned unique ID for this managed resource.
- Sizes
List<Pulumi.
Digital Ocean. Outputs. Get Dedicated Inference Sizes Size> - The list of available GPU sizes. Each element contains:
- Enabled
Regions []string - The list of region slugs where dedicated inference endpoints can be deployed.
- Id string
- The provider-assigned unique ID for this managed resource.
- Sizes
[]Get
Dedicated Inference Sizes Size - The list of available GPU sizes. Each element contains:
- enabled
Regions List<String> - The list of region slugs where dedicated inference endpoints can be deployed.
- id String
- The provider-assigned unique ID for this managed resource.
- sizes
List<Get
Dedicated Inference Sizes Size> - The list of available GPU sizes. Each element contains:
- enabled
Regions string[] - The list of region slugs where dedicated inference endpoints can be deployed.
- id string
- The provider-assigned unique ID for this managed resource.
- sizes
Get
Dedicated Inference Sizes Size[] - The list of available GPU sizes. Each element contains:
- enabled_
regions Sequence[str] - The list of region slugs where dedicated inference endpoints can be deployed.
- id str
- The provider-assigned unique ID for this managed resource.
- sizes
Sequence[Get
Dedicated Inference Sizes Size] - The list of available GPU sizes. Each element contains:
- enabled
Regions List<String> - The list of region slugs where dedicated inference endpoints can be deployed.
- id String
- The provider-assigned unique ID for this managed resource.
- sizes List<Property Map>
- The list of available GPU sizes. Each element contains:
Supporting Types
GetDedicatedInferenceSizesSize
- Cpu int
- The number of vCPUs.
- Currency string
- The currency for the price.
- Disks
List<Pulumi.
Digital Ocean. Inputs. Get Dedicated Inference Sizes Size Disk> - The disks attached to this size. Each element contains:
- Gpu
Slug string - The slug identifier for this GPU size.
- Gpus
List<Pulumi.
Digital Ocean. Inputs. Get Dedicated Inference Sizes Size Gpus> - GPU hardware details. Each element contains:
- Memory int
- The amount of memory in MiB.
- Price
Per Hour string - The hourly price for this GPU size.
- Regions List<string>
- The regions where this GPU size is available.
- Size
Categories List<Pulumi.Digital Ocean. Inputs. Get Dedicated Inference Sizes Size Size Category> - The category this size belongs to. Each element contains:
- Cpu int
- The number of vCPUs.
- Currency string
- The currency for the price.
- Disks
[]Get
Dedicated Inference Sizes Size Disk - The disks attached to this size. Each element contains:
- Gpu
Slug string - The slug identifier for this GPU size.
- Gpus
[]Get
Dedicated Inference Sizes Size Gpus - GPU hardware details. Each element contains:
- Memory int
- The amount of memory in MiB.
- Price
Per Hour string - The hourly price for this GPU size.
- Regions []string
- The regions where this GPU size is available.
- Size
Categories []GetDedicated Inference Sizes Size Size Category - The category this size belongs to. Each element contains:
- cpu Integer
- The number of vCPUs.
- currency String
- The currency for the price.
- disks
List<Get
Dedicated Inference Sizes Size Disk> - The disks attached to this size. Each element contains:
- gpu
Slug String - The slug identifier for this GPU size.
- gpus
List<Get
Dedicated Inference Sizes Size Gpus> - GPU hardware details. Each element contains:
- memory Integer
- The amount of memory in MiB.
- price
Per Hour String - The hourly price for this GPU size.
- regions List<String>
- The regions where this GPU size is available.
- size
Categories List<GetDedicated Inference Sizes Size Size Category> - The category this size belongs to. Each element contains:
- cpu number
- The number of vCPUs.
- currency string
- The currency for the price.
- disks
Get
Dedicated Inference Sizes Size Disk[] - The disks attached to this size. Each element contains:
- gpu
Slug string - The slug identifier for this GPU size.
- gpus
Get
Dedicated Inference Sizes Size Gpus[] - GPU hardware details. Each element contains:
- memory number
- The amount of memory in MiB.
- price
Per Hour string - The hourly price for this GPU size.
- regions string[]
- The regions where this GPU size is available.
- size
Categories GetDedicated Inference Sizes Size Size Category[] - The category this size belongs to. Each element contains:
- cpu int
- The number of vCPUs.
- currency str
- The currency for the price.
- disks
Sequence[Get
Dedicated Inference Sizes Size Disk] - The disks attached to this size. Each element contains:
- gpu_
slug str - The slug identifier for this GPU size.
- gpus
Sequence[Get
Dedicated Inference Sizes Size Gpus] - GPU hardware details. Each element contains:
- memory int
- The amount of memory in MiB.
- price_
per_ hour str - The hourly price for this GPU size.
- regions Sequence[str]
- The regions where this GPU size is available.
- size_
categories Sequence[GetDedicated Inference Sizes Size Size Category] - The category this size belongs to. Each element contains:
- cpu Number
- The number of vCPUs.
- currency String
- The currency for the price.
- disks List<Property Map>
- The disks attached to this size. Each element contains:
- gpu
Slug String - The slug identifier for this GPU size.
- gpus List<Property Map>
- GPU hardware details. Each element contains:
- memory Number
- The amount of memory in MiB.
- price
Per Hour String - The hourly price for this GPU size.
- regions List<String>
- The regions where this GPU size is available.
- size
Categories List<Property Map> - The category this size belongs to. Each element contains:
GetDedicatedInferenceSizesSizeDisk
GetDedicatedInferenceSizesSizeGpus
GetDedicatedInferenceSizesSizeSizeCategory
- fleet_
name str - The fleet name associated with the size category.
- name str
- The display name of the size category.
Package Details
- Repository
- DigitalOcean pulumi/pulumi-digitalocean
- License
- Apache-2.0
- Notes
- This Pulumi package is based on the
digitalocean Terraform Provider.
Viewing docs for DigitalOcean v4.65.0
published on Wednesday, Apr 29, 2026 by Pulumi
