1. Packages
  2. Airbyte Provider
  3. API Docs
  4. DestinationSnowflake
airbyte 1.0.0-rc6 published on Monday, Feb 16, 2026 by airbytehq
airbyte logo
airbyte 1.0.0-rc6 published on Monday, Feb 16, 2026 by airbytehq

    DestinationSnowflake Resource

    Example Usage

    import * as pulumi from "@pulumi/pulumi";
    import * as airbyte from "@pulumi/airbyte";
    
    const myDestinationSnowflake = new airbyte.DestinationSnowflake("my_destination_snowflake", {
        configuration: {
            additionalProperties: "{ \"see\": \"documentation\" }",
            cdcDeletionMode: "Soft delete",
            credentials: {
                keyPairAuthentication: {
                    additionalProperties: "{ \"see\": \"documentation\" }",
                    authType: "Key Pair Authentication",
                    privateKey: "...my_private_key...",
                    privateKeyPassword: "...my_private_key_password...",
                },
            },
            database: "AIRBYTE_DATABASE",
            disableTypeDedupe: true,
            host: "accountname.us-east-2.aws.snowflakecomputing.com",
            jdbcUrlParams: "...my_jdbc_url_params...",
            rawDataSchema: "...my_raw_data_schema...",
            retentionPeriodDays: 9,
            role: "AIRBYTE_ROLE",
            schema: "AIRBYTE_SCHEMA",
            username: "AIRBYTE_USER",
            warehouse: "AIRBYTE_WAREHOUSE",
        },
        definitionId: "fce231ce-04a4-46ec-a244-d1436db0281f",
        name: "...my_name...",
        workspaceId: "058d9730-38a6-485c-8631-dc0cc86125f9",
    });
    
    import pulumi
    import pulumi_airbyte as airbyte
    
    my_destination_snowflake = airbyte.DestinationSnowflake("my_destination_snowflake",
        configuration={
            "additional_properties": "{ \"see\": \"documentation\" }",
            "cdc_deletion_mode": "Soft delete",
            "credentials": {
                "key_pair_authentication": {
                    "additional_properties": "{ \"see\": \"documentation\" }",
                    "auth_type": "Key Pair Authentication",
                    "private_key": "...my_private_key...",
                    "private_key_password": "...my_private_key_password...",
                },
            },
            "database": "AIRBYTE_DATABASE",
            "disable_type_dedupe": True,
            "host": "accountname.us-east-2.aws.snowflakecomputing.com",
            "jdbc_url_params": "...my_jdbc_url_params...",
            "raw_data_schema": "...my_raw_data_schema...",
            "retention_period_days": 9,
            "role": "AIRBYTE_ROLE",
            "schema": "AIRBYTE_SCHEMA",
            "username": "AIRBYTE_USER",
            "warehouse": "AIRBYTE_WAREHOUSE",
        },
        definition_id="fce231ce-04a4-46ec-a244-d1436db0281f",
        name="...my_name...",
        workspace_id="058d9730-38a6-485c-8631-dc0cc86125f9")
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-terraform-provider/sdks/go/airbyte/airbyte"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		_, err := airbyte.NewDestinationSnowflake(ctx, "my_destination_snowflake", &airbyte.DestinationSnowflakeArgs{
    			Configuration: &airbyte.DestinationSnowflakeConfigurationArgs{
    				AdditionalProperties: pulumi.String("{ \"see\": \"documentation\" }"),
    				CdcDeletionMode:      pulumi.String("Soft delete"),
    				Credentials: &airbyte.DestinationSnowflakeConfigurationCredentialsArgs{
    					KeyPairAuthentication: &airbyte.DestinationSnowflakeConfigurationCredentialsKeyPairAuthenticationArgs{
    						AdditionalProperties: pulumi.String("{ \"see\": \"documentation\" }"),
    						AuthType:             pulumi.String("Key Pair Authentication"),
    						PrivateKey:           pulumi.String("...my_private_key..."),
    						PrivateKeyPassword:   pulumi.String("...my_private_key_password..."),
    					},
    				},
    				Database:            pulumi.String("AIRBYTE_DATABASE"),
    				DisableTypeDedupe:   pulumi.Bool(true),
    				Host:                pulumi.String("accountname.us-east-2.aws.snowflakecomputing.com"),
    				JdbcUrlParams:       pulumi.String("...my_jdbc_url_params..."),
    				RawDataSchema:       pulumi.String("...my_raw_data_schema..."),
    				RetentionPeriodDays: pulumi.Float64(9),
    				Role:                pulumi.String("AIRBYTE_ROLE"),
    				Schema:              pulumi.String("AIRBYTE_SCHEMA"),
    				Username:            pulumi.String("AIRBYTE_USER"),
    				Warehouse:           pulumi.String("AIRBYTE_WAREHOUSE"),
    			},
    			DefinitionId: pulumi.String("fce231ce-04a4-46ec-a244-d1436db0281f"),
    			Name:         pulumi.String("...my_name..."),
    			WorkspaceId:  pulumi.String("058d9730-38a6-485c-8631-dc0cc86125f9"),
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Airbyte = Pulumi.Airbyte;
    
    return await Deployment.RunAsync(() => 
    {
        var myDestinationSnowflake = new Airbyte.DestinationSnowflake("my_destination_snowflake", new()
        {
            Configuration = new Airbyte.Inputs.DestinationSnowflakeConfigurationArgs
            {
                AdditionalProperties = "{ \"see\": \"documentation\" }",
                CdcDeletionMode = "Soft delete",
                Credentials = new Airbyte.Inputs.DestinationSnowflakeConfigurationCredentialsArgs
                {
                    KeyPairAuthentication = new Airbyte.Inputs.DestinationSnowflakeConfigurationCredentialsKeyPairAuthenticationArgs
                    {
                        AdditionalProperties = "{ \"see\": \"documentation\" }",
                        AuthType = "Key Pair Authentication",
                        PrivateKey = "...my_private_key...",
                        PrivateKeyPassword = "...my_private_key_password...",
                    },
                },
                Database = "AIRBYTE_DATABASE",
                DisableTypeDedupe = true,
                Host = "accountname.us-east-2.aws.snowflakecomputing.com",
                JdbcUrlParams = "...my_jdbc_url_params...",
                RawDataSchema = "...my_raw_data_schema...",
                RetentionPeriodDays = 9,
                Role = "AIRBYTE_ROLE",
                Schema = "AIRBYTE_SCHEMA",
                Username = "AIRBYTE_USER",
                Warehouse = "AIRBYTE_WAREHOUSE",
            },
            DefinitionId = "fce231ce-04a4-46ec-a244-d1436db0281f",
            Name = "...my_name...",
            WorkspaceId = "058d9730-38a6-485c-8631-dc0cc86125f9",
        });
    
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.airbyte.DestinationSnowflake;
    import com.pulumi.airbyte.DestinationSnowflakeArgs;
    import com.pulumi.airbyte.inputs.DestinationSnowflakeConfigurationArgs;
    import com.pulumi.airbyte.inputs.DestinationSnowflakeConfigurationCredentialsArgs;
    import com.pulumi.airbyte.inputs.DestinationSnowflakeConfigurationCredentialsKeyPairAuthenticationArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var myDestinationSnowflake = new DestinationSnowflake("myDestinationSnowflake", DestinationSnowflakeArgs.builder()
                .configuration(DestinationSnowflakeConfigurationArgs.builder()
                    .additionalProperties("{ \"see\": \"documentation\" }")
                    .cdcDeletionMode("Soft delete")
                    .credentials(DestinationSnowflakeConfigurationCredentialsArgs.builder()
                        .keyPairAuthentication(DestinationSnowflakeConfigurationCredentialsKeyPairAuthenticationArgs.builder()
                            .additionalProperties("{ \"see\": \"documentation\" }")
                            .authType("Key Pair Authentication")
                            .privateKey("...my_private_key...")
                            .privateKeyPassword("...my_private_key_password...")
                            .build())
                        .build())
                    .database("AIRBYTE_DATABASE")
                    .disableTypeDedupe(true)
                    .host("accountname.us-east-2.aws.snowflakecomputing.com")
                    .jdbcUrlParams("...my_jdbc_url_params...")
                    .rawDataSchema("...my_raw_data_schema...")
                    .retentionPeriodDays(9.0)
                    .role("AIRBYTE_ROLE")
                    .schema("AIRBYTE_SCHEMA")
                    .username("AIRBYTE_USER")
                    .warehouse("AIRBYTE_WAREHOUSE")
                    .build())
                .definitionId("fce231ce-04a4-46ec-a244-d1436db0281f")
                .name("...my_name...")
                .workspaceId("058d9730-38a6-485c-8631-dc0cc86125f9")
                .build());
    
        }
    }
    
    resources:
      myDestinationSnowflake:
        type: airbyte:DestinationSnowflake
        name: my_destination_snowflake
        properties:
          configuration:
            additionalProperties: '{ "see": "documentation" }'
            cdcDeletionMode: Soft delete
            credentials:
              keyPairAuthentication:
                additionalProperties: '{ "see": "documentation" }'
                authType: Key Pair Authentication
                privateKey: '...my_private_key...'
                privateKeyPassword: '...my_private_key_password...'
            database: AIRBYTE_DATABASE
            disableTypeDedupe: true
            host: accountname.us-east-2.aws.snowflakecomputing.com
            jdbcUrlParams: '...my_jdbc_url_params...'
            rawDataSchema: '...my_raw_data_schema...'
            retentionPeriodDays: 9
            role: AIRBYTE_ROLE
            schema: AIRBYTE_SCHEMA
            username: AIRBYTE_USER
            warehouse: AIRBYTE_WAREHOUSE
          definitionId: fce231ce-04a4-46ec-a244-d1436db0281f
          name: '...my_name...'
          workspaceId: 058d9730-38a6-485c-8631-dc0cc86125f9
    

    Create DestinationSnowflake Resource

    Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.

    Constructor syntax

    new DestinationSnowflake(name: string, args: DestinationSnowflakeArgs, opts?: CustomResourceOptions);
    @overload
    def DestinationSnowflake(resource_name: str,
                             args: DestinationSnowflakeArgs,
                             opts: Optional[ResourceOptions] = None)
    
    @overload
    def DestinationSnowflake(resource_name: str,
                             opts: Optional[ResourceOptions] = None,
                             configuration: Optional[DestinationSnowflakeConfigurationArgs] = None,
                             workspace_id: Optional[str] = None,
                             definition_id: Optional[str] = None,
                             name: Optional[str] = None)
    func NewDestinationSnowflake(ctx *Context, name string, args DestinationSnowflakeArgs, opts ...ResourceOption) (*DestinationSnowflake, error)
    public DestinationSnowflake(string name, DestinationSnowflakeArgs args, CustomResourceOptions? opts = null)
    public DestinationSnowflake(String name, DestinationSnowflakeArgs args)
    public DestinationSnowflake(String name, DestinationSnowflakeArgs args, CustomResourceOptions options)
    
    type: airbyte:DestinationSnowflake
    properties: # The arguments to resource properties.
    options: # Bag of options to control resource's behavior.
    
    

    Parameters

    name string
    The unique name of the resource.
    args DestinationSnowflakeArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    resource_name str
    The unique name of the resource.
    args DestinationSnowflakeArgs
    The arguments to resource properties.
    opts ResourceOptions
    Bag of options to control resource's behavior.
    ctx Context
    Context object for the current deployment.
    name string
    The unique name of the resource.
    args DestinationSnowflakeArgs
    The arguments to resource properties.
    opts ResourceOption
    Bag of options to control resource's behavior.
    name string
    The unique name of the resource.
    args DestinationSnowflakeArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    name String
    The unique name of the resource.
    args DestinationSnowflakeArgs
    The arguments to resource properties.
    options CustomResourceOptions
    Bag of options to control resource's behavior.

    Constructor example

    The following reference example uses placeholder values for all input properties.

    var destinationSnowflakeResource = new Airbyte.DestinationSnowflake("destinationSnowflakeResource", new()
    {
        Configuration = new Airbyte.Inputs.DestinationSnowflakeConfigurationArgs
        {
            Role = "string",
            Database = "string",
            Host = "string",
            Schema = "string",
            Username = "string",
            Warehouse = "string",
            CdcDeletionMode = "string",
            Credentials = new Airbyte.Inputs.DestinationSnowflakeConfigurationCredentialsArgs
            {
                KeyPairAuthentication = new Airbyte.Inputs.DestinationSnowflakeConfigurationCredentialsKeyPairAuthenticationArgs
                {
                    PrivateKey = "string",
                    AdditionalProperties = "string",
                    AuthType = "string",
                    PrivateKeyPassword = "string",
                },
                UsernameAndPassword = new Airbyte.Inputs.DestinationSnowflakeConfigurationCredentialsUsernameAndPasswordArgs
                {
                    Password = "string",
                    AdditionalProperties = "string",
                    AuthType = "string",
                },
            },
            DisableTypeDedupe = false,
            JdbcUrlParams = "string",
            RawDataSchema = "string",
            RetentionPeriodDays = 0,
            AdditionalProperties = "string",
        },
        WorkspaceId = "string",
        DefinitionId = "string",
        Name = "string",
    });
    
    example, err := airbyte.NewDestinationSnowflake(ctx, "destinationSnowflakeResource", &airbyte.DestinationSnowflakeArgs{
    	Configuration: &airbyte.DestinationSnowflakeConfigurationArgs{
    		Role:            pulumi.String("string"),
    		Database:        pulumi.String("string"),
    		Host:            pulumi.String("string"),
    		Schema:          pulumi.String("string"),
    		Username:        pulumi.String("string"),
    		Warehouse:       pulumi.String("string"),
    		CdcDeletionMode: pulumi.String("string"),
    		Credentials: &airbyte.DestinationSnowflakeConfigurationCredentialsArgs{
    			KeyPairAuthentication: &airbyte.DestinationSnowflakeConfigurationCredentialsKeyPairAuthenticationArgs{
    				PrivateKey:           pulumi.String("string"),
    				AdditionalProperties: pulumi.String("string"),
    				AuthType:             pulumi.String("string"),
    				PrivateKeyPassword:   pulumi.String("string"),
    			},
    			UsernameAndPassword: &airbyte.DestinationSnowflakeConfigurationCredentialsUsernameAndPasswordArgs{
    				Password:             pulumi.String("string"),
    				AdditionalProperties: pulumi.String("string"),
    				AuthType:             pulumi.String("string"),
    			},
    		},
    		DisableTypeDedupe:    pulumi.Bool(false),
    		JdbcUrlParams:        pulumi.String("string"),
    		RawDataSchema:        pulumi.String("string"),
    		RetentionPeriodDays:  pulumi.Float64(0),
    		AdditionalProperties: pulumi.String("string"),
    	},
    	WorkspaceId:  pulumi.String("string"),
    	DefinitionId: pulumi.String("string"),
    	Name:         pulumi.String("string"),
    })
    
    var destinationSnowflakeResource = new DestinationSnowflake("destinationSnowflakeResource", DestinationSnowflakeArgs.builder()
        .configuration(DestinationSnowflakeConfigurationArgs.builder()
            .role("string")
            .database("string")
            .host("string")
            .schema("string")
            .username("string")
            .warehouse("string")
            .cdcDeletionMode("string")
            .credentials(DestinationSnowflakeConfigurationCredentialsArgs.builder()
                .keyPairAuthentication(DestinationSnowflakeConfigurationCredentialsKeyPairAuthenticationArgs.builder()
                    .privateKey("string")
                    .additionalProperties("string")
                    .authType("string")
                    .privateKeyPassword("string")
                    .build())
                .usernameAndPassword(DestinationSnowflakeConfigurationCredentialsUsernameAndPasswordArgs.builder()
                    .password("string")
                    .additionalProperties("string")
                    .authType("string")
                    .build())
                .build())
            .disableTypeDedupe(false)
            .jdbcUrlParams("string")
            .rawDataSchema("string")
            .retentionPeriodDays(0.0)
            .additionalProperties("string")
            .build())
        .workspaceId("string")
        .definitionId("string")
        .name("string")
        .build());
    
    destination_snowflake_resource = airbyte.DestinationSnowflake("destinationSnowflakeResource",
        configuration={
            "role": "string",
            "database": "string",
            "host": "string",
            "schema": "string",
            "username": "string",
            "warehouse": "string",
            "cdc_deletion_mode": "string",
            "credentials": {
                "key_pair_authentication": {
                    "private_key": "string",
                    "additional_properties": "string",
                    "auth_type": "string",
                    "private_key_password": "string",
                },
                "username_and_password": {
                    "password": "string",
                    "additional_properties": "string",
                    "auth_type": "string",
                },
            },
            "disable_type_dedupe": False,
            "jdbc_url_params": "string",
            "raw_data_schema": "string",
            "retention_period_days": 0,
            "additional_properties": "string",
        },
        workspace_id="string",
        definition_id="string",
        name="string")
    
    const destinationSnowflakeResource = new airbyte.DestinationSnowflake("destinationSnowflakeResource", {
        configuration: {
            role: "string",
            database: "string",
            host: "string",
            schema: "string",
            username: "string",
            warehouse: "string",
            cdcDeletionMode: "string",
            credentials: {
                keyPairAuthentication: {
                    privateKey: "string",
                    additionalProperties: "string",
                    authType: "string",
                    privateKeyPassword: "string",
                },
                usernameAndPassword: {
                    password: "string",
                    additionalProperties: "string",
                    authType: "string",
                },
            },
            disableTypeDedupe: false,
            jdbcUrlParams: "string",
            rawDataSchema: "string",
            retentionPeriodDays: 0,
            additionalProperties: "string",
        },
        workspaceId: "string",
        definitionId: "string",
        name: "string",
    });
    
    type: airbyte:DestinationSnowflake
    properties:
        configuration:
            additionalProperties: string
            cdcDeletionMode: string
            credentials:
                keyPairAuthentication:
                    additionalProperties: string
                    authType: string
                    privateKey: string
                    privateKeyPassword: string
                usernameAndPassword:
                    additionalProperties: string
                    authType: string
                    password: string
            database: string
            disableTypeDedupe: false
            host: string
            jdbcUrlParams: string
            rawDataSchema: string
            retentionPeriodDays: 0
            role: string
            schema: string
            username: string
            warehouse: string
        definitionId: string
        name: string
        workspaceId: string
    

    DestinationSnowflake Resource Properties

    To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

    Inputs

    In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.

    The DestinationSnowflake resource accepts the following input properties:

    Configuration DestinationSnowflakeConfiguration
    The values required to configure the destination. The schema for this must match the schema returned by destinationdefinitionspecifications/get for the destinationDefinition.
    WorkspaceId string
    DefinitionId string
    The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Default: "424892c4-daac-4491-b35d-c6688ba547ba"; Requires replacement if changed.
    Name string
    Name of the destination e.g. dev-mysql-instance.
    Configuration DestinationSnowflakeConfigurationArgs
    The values required to configure the destination. The schema for this must match the schema returned by destinationdefinitionspecifications/get for the destinationDefinition.
    WorkspaceId string
    DefinitionId string
    The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Default: "424892c4-daac-4491-b35d-c6688ba547ba"; Requires replacement if changed.
    Name string
    Name of the destination e.g. dev-mysql-instance.
    configuration DestinationSnowflakeConfiguration
    The values required to configure the destination. The schema for this must match the schema returned by destinationdefinitionspecifications/get for the destinationDefinition.
    workspaceId String
    definitionId String
    The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Default: "424892c4-daac-4491-b35d-c6688ba547ba"; Requires replacement if changed.
    name String
    Name of the destination e.g. dev-mysql-instance.
    configuration DestinationSnowflakeConfiguration
    The values required to configure the destination. The schema for this must match the schema returned by destinationdefinitionspecifications/get for the destinationDefinition.
    workspaceId string
    definitionId string
    The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Default: "424892c4-daac-4491-b35d-c6688ba547ba"; Requires replacement if changed.
    name string
    Name of the destination e.g. dev-mysql-instance.
    configuration DestinationSnowflakeConfigurationArgs
    The values required to configure the destination. The schema for this must match the schema returned by destinationdefinitionspecifications/get for the destinationDefinition.
    workspace_id str
    definition_id str
    The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Default: "424892c4-daac-4491-b35d-c6688ba547ba"; Requires replacement if changed.
    name str
    Name of the destination e.g. dev-mysql-instance.
    configuration Property Map
    The values required to configure the destination. The schema for this must match the schema returned by destinationdefinitionspecifications/get for the destinationDefinition.
    workspaceId String
    definitionId String
    The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Default: "424892c4-daac-4491-b35d-c6688ba547ba"; Requires replacement if changed.
    name String
    Name of the destination e.g. dev-mysql-instance.

    Outputs

    All input properties are implicitly available as output properties. Additionally, the DestinationSnowflake resource produces the following output properties:

    CreatedAt double
    DestinationId string
    DestinationType string
    Id string
    The provider-assigned unique ID for this managed resource.
    ResourceAllocation DestinationSnowflakeResourceAllocation
    actor or actor definition specific resource requirements. if default is set, these are the requirements that should be set for ALL jobs run for this actor definition. it is overridden by the job type specific configurations. if not set, the platform will use defaults. these values will be overridden by configuration at the connection level.
    CreatedAt float64
    DestinationId string
    DestinationType string
    Id string
    The provider-assigned unique ID for this managed resource.
    ResourceAllocation DestinationSnowflakeResourceAllocation
    actor or actor definition specific resource requirements. if default is set, these are the requirements that should be set for ALL jobs run for this actor definition. it is overridden by the job type specific configurations. if not set, the platform will use defaults. these values will be overridden by configuration at the connection level.
    createdAt Double
    destinationId String
    destinationType String
    id String
    The provider-assigned unique ID for this managed resource.
    resourceAllocation DestinationSnowflakeResourceAllocation
    actor or actor definition specific resource requirements. if default is set, these are the requirements that should be set for ALL jobs run for this actor definition. it is overridden by the job type specific configurations. if not set, the platform will use defaults. these values will be overridden by configuration at the connection level.
    createdAt number
    destinationId string
    destinationType string
    id string
    The provider-assigned unique ID for this managed resource.
    resourceAllocation DestinationSnowflakeResourceAllocation
    actor or actor definition specific resource requirements. if default is set, these are the requirements that should be set for ALL jobs run for this actor definition. it is overridden by the job type specific configurations. if not set, the platform will use defaults. these values will be overridden by configuration at the connection level.
    created_at float
    destination_id str
    destination_type str
    id str
    The provider-assigned unique ID for this managed resource.
    resource_allocation DestinationSnowflakeResourceAllocation
    actor or actor definition specific resource requirements. if default is set, these are the requirements that should be set for ALL jobs run for this actor definition. it is overridden by the job type specific configurations. if not set, the platform will use defaults. these values will be overridden by configuration at the connection level.
    createdAt Number
    destinationId String
    destinationType String
    id String
    The provider-assigned unique ID for this managed resource.
    resourceAllocation Property Map
    actor or actor definition specific resource requirements. if default is set, these are the requirements that should be set for ALL jobs run for this actor definition. it is overridden by the job type specific configurations. if not set, the platform will use defaults. these values will be overridden by configuration at the connection level.

    Look up Existing DestinationSnowflake Resource

    Get an existing DestinationSnowflake resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

    public static get(name: string, id: Input<ID>, state?: DestinationSnowflakeState, opts?: CustomResourceOptions): DestinationSnowflake
    @staticmethod
    def get(resource_name: str,
            id: str,
            opts: Optional[ResourceOptions] = None,
            configuration: Optional[DestinationSnowflakeConfigurationArgs] = None,
            created_at: Optional[float] = None,
            definition_id: Optional[str] = None,
            destination_id: Optional[str] = None,
            destination_type: Optional[str] = None,
            name: Optional[str] = None,
            resource_allocation: Optional[DestinationSnowflakeResourceAllocationArgs] = None,
            workspace_id: Optional[str] = None) -> DestinationSnowflake
    func GetDestinationSnowflake(ctx *Context, name string, id IDInput, state *DestinationSnowflakeState, opts ...ResourceOption) (*DestinationSnowflake, error)
    public static DestinationSnowflake Get(string name, Input<string> id, DestinationSnowflakeState? state, CustomResourceOptions? opts = null)
    public static DestinationSnowflake get(String name, Output<String> id, DestinationSnowflakeState state, CustomResourceOptions options)
    resources:
      _:
        type: airbyte:DestinationSnowflake
        get:
          id: ${id}
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    resource_name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    The following state arguments are supported:
    Configuration DestinationSnowflakeConfiguration
    The values required to configure the destination. The schema for this must match the schema returned by destinationdefinitionspecifications/get for the destinationDefinition.
    CreatedAt double
    DefinitionId string
    The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Default: "424892c4-daac-4491-b35d-c6688ba547ba"; Requires replacement if changed.
    DestinationId string
    DestinationType string
    Name string
    Name of the destination e.g. dev-mysql-instance.
    ResourceAllocation DestinationSnowflakeResourceAllocation
    actor or actor definition specific resource requirements. if default is set, these are the requirements that should be set for ALL jobs run for this actor definition. it is overridden by the job type specific configurations. if not set, the platform will use defaults. these values will be overridden by configuration at the connection level.
    WorkspaceId string
    Configuration DestinationSnowflakeConfigurationArgs
    The values required to configure the destination. The schema for this must match the schema returned by destinationdefinitionspecifications/get for the destinationDefinition.
    CreatedAt float64
    DefinitionId string
    The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Default: "424892c4-daac-4491-b35d-c6688ba547ba"; Requires replacement if changed.
    DestinationId string
    DestinationType string
    Name string
    Name of the destination e.g. dev-mysql-instance.
    ResourceAllocation DestinationSnowflakeResourceAllocationArgs
    actor or actor definition specific resource requirements. if default is set, these are the requirements that should be set for ALL jobs run for this actor definition. it is overridden by the job type specific configurations. if not set, the platform will use defaults. these values will be overridden by configuration at the connection level.
    WorkspaceId string
    configuration DestinationSnowflakeConfiguration
    The values required to configure the destination. The schema for this must match the schema returned by destinationdefinitionspecifications/get for the destinationDefinition.
    createdAt Double
    definitionId String
    The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Default: "424892c4-daac-4491-b35d-c6688ba547ba"; Requires replacement if changed.
    destinationId String
    destinationType String
    name String
    Name of the destination e.g. dev-mysql-instance.
    resourceAllocation DestinationSnowflakeResourceAllocation
    actor or actor definition specific resource requirements. if default is set, these are the requirements that should be set for ALL jobs run for this actor definition. it is overridden by the job type specific configurations. if not set, the platform will use defaults. these values will be overridden by configuration at the connection level.
    workspaceId String
    configuration DestinationSnowflakeConfiguration
    The values required to configure the destination. The schema for this must match the schema returned by destinationdefinitionspecifications/get for the destinationDefinition.
    createdAt number
    definitionId string
    The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Default: "424892c4-daac-4491-b35d-c6688ba547ba"; Requires replacement if changed.
    destinationId string
    destinationType string
    name string
    Name of the destination e.g. dev-mysql-instance.
    resourceAllocation DestinationSnowflakeResourceAllocation
    actor or actor definition specific resource requirements. if default is set, these are the requirements that should be set for ALL jobs run for this actor definition. it is overridden by the job type specific configurations. if not set, the platform will use defaults. these values will be overridden by configuration at the connection level.
    workspaceId string
    configuration DestinationSnowflakeConfigurationArgs
    The values required to configure the destination. The schema for this must match the schema returned by destinationdefinitionspecifications/get for the destinationDefinition.
    created_at float
    definition_id str
    The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Default: "424892c4-daac-4491-b35d-c6688ba547ba"; Requires replacement if changed.
    destination_id str
    destination_type str
    name str
    Name of the destination e.g. dev-mysql-instance.
    resource_allocation DestinationSnowflakeResourceAllocationArgs
    actor or actor definition specific resource requirements. if default is set, these are the requirements that should be set for ALL jobs run for this actor definition. it is overridden by the job type specific configurations. if not set, the platform will use defaults. these values will be overridden by configuration at the connection level.
    workspace_id str
    configuration Property Map
    The values required to configure the destination. The schema for this must match the schema returned by destinationdefinitionspecifications/get for the destinationDefinition.
    createdAt Number
    definitionId String
    The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Default: "424892c4-daac-4491-b35d-c6688ba547ba"; Requires replacement if changed.
    destinationId String
    destinationType String
    name String
    Name of the destination e.g. dev-mysql-instance.
    resourceAllocation Property Map
    actor or actor definition specific resource requirements. if default is set, these are the requirements that should be set for ALL jobs run for this actor definition. it is overridden by the job type specific configurations. if not set, the platform will use defaults. these values will be overridden by configuration at the connection level.
    workspaceId String

    Supporting Types

    DestinationSnowflakeConfiguration, DestinationSnowflakeConfigurationArgs

    Database string
    Enter the name of the database you want to sync data into
    Host string
    Enter your Snowflake account's locator (in the format <account_locator>.<region>.<cloud>.snowflakecomputing.com)
    Role string
    Enter the role that you want to use to access Snowflake
    Schema string
    Enter the name of the default schema
    Username string
    Enter the name of the user you want to use to access the database
    Warehouse string
    Enter the name of the warehouse that you want to use as a compute cluster
    AdditionalProperties string
    Parsed as JSON.
    CdcDeletionMode string
    Whether to execute CDC deletions as hard deletes (i.e. propagate source deletions to the destination), or soft deletes (i.e. leave a tombstone record in the destination). Defaults to hard deletes. Default: "Hard delete"; must be one of ["Hard delete", "Soft delete"]
    Credentials DestinationSnowflakeConfigurationCredentials
    Determines the type of authentication that should be used.
    DisableTypeDedupe bool
    Write the legacy "raw tables" format, to enable backwards compatibility with older versions of this connector.
    JdbcUrlParams string
    Enter the additional properties to pass to the JDBC URL string when connecting to the database (formatted as key=value pairs separated by the symbol &). Example: key1=value1&key2=value2&key3=value3
    RawDataSchema string
    Airbyte will use this dataset for various internal tables. In legacy raw tables mode, the raw tables will be stored in this dataset. Defaults to "airbyte_internal".
    RetentionPeriodDays double
    The number of days of Snowflake Time Travel to enable on the tables. See Snowflake's documentation for more information. Setting a nonzero value will incur increased storage costs in your Snowflake instance.
    Database string
    Enter the name of the database you want to sync data into
    Host string
    Enter your Snowflake account's locator (in the format <account_locator>.<region>.<cloud>.snowflakecomputing.com)
    Role string
    Enter the role that you want to use to access Snowflake
    Schema string
    Enter the name of the default schema
    Username string
    Enter the name of the user you want to use to access the database
    Warehouse string
    Enter the name of the warehouse that you want to use as a compute cluster
    AdditionalProperties string
    Parsed as JSON.
    CdcDeletionMode string
    Whether to execute CDC deletions as hard deletes (i.e. propagate source deletions to the destination), or soft deletes (i.e. leave a tombstone record in the destination). Defaults to hard deletes. Default: "Hard delete"; must be one of ["Hard delete", "Soft delete"]
    Credentials DestinationSnowflakeConfigurationCredentials
    Determines the type of authentication that should be used.
    DisableTypeDedupe bool
    Write the legacy "raw tables" format, to enable backwards compatibility with older versions of this connector.
    JdbcUrlParams string
    Enter the additional properties to pass to the JDBC URL string when connecting to the database (formatted as key=value pairs separated by the symbol &). Example: key1=value1&key2=value2&key3=value3
    RawDataSchema string
    Airbyte will use this dataset for various internal tables. In legacy raw tables mode, the raw tables will be stored in this dataset. Defaults to "airbyte_internal".
    RetentionPeriodDays float64
    The number of days of Snowflake Time Travel to enable on the tables. See Snowflake's documentation for more information. Setting a nonzero value will incur increased storage costs in your Snowflake instance.
    database String
    Enter the name of the database you want to sync data into
    host String
    Enter your Snowflake account's locator (in the format <account_locator>.<region>.<cloud>.snowflakecomputing.com)
    role String
    Enter the role that you want to use to access Snowflake
    schema String
    Enter the name of the default schema
    username String
    Enter the name of the user you want to use to access the database
    warehouse String
    Enter the name of the warehouse that you want to use as a compute cluster
    additionalProperties String
    Parsed as JSON.
    cdcDeletionMode String
    Whether to execute CDC deletions as hard deletes (i.e. propagate source deletions to the destination), or soft deletes (i.e. leave a tombstone record in the destination). Defaults to hard deletes. Default: "Hard delete"; must be one of ["Hard delete", "Soft delete"]
    credentials DestinationSnowflakeConfigurationCredentials
    Determines the type of authentication that should be used.
    disableTypeDedupe Boolean
    Write the legacy "raw tables" format, to enable backwards compatibility with older versions of this connector.
    jdbcUrlParams String
    Enter the additional properties to pass to the JDBC URL string when connecting to the database (formatted as key=value pairs separated by the symbol &). Example: key1=value1&key2=value2&key3=value3
    rawDataSchema String
    Airbyte will use this dataset for various internal tables. In legacy raw tables mode, the raw tables will be stored in this dataset. Defaults to "airbyte_internal".
    retentionPeriodDays Double
    The number of days of Snowflake Time Travel to enable on the tables. See Snowflake's documentation for more information. Setting a nonzero value will incur increased storage costs in your Snowflake instance.
    database string
    Enter the name of the database you want to sync data into
    host string
    Enter your Snowflake account's locator (in the format <account_locator>.<region>.<cloud>.snowflakecomputing.com)
    role string
    Enter the role that you want to use to access Snowflake
    schema string
    Enter the name of the default schema
    username string
    Enter the name of the user you want to use to access the database
    warehouse string
    Enter the name of the warehouse that you want to use as a compute cluster
    additionalProperties string
    Parsed as JSON.
    cdcDeletionMode string
    Whether to execute CDC deletions as hard deletes (i.e. propagate source deletions to the destination), or soft deletes (i.e. leave a tombstone record in the destination). Defaults to hard deletes. Default: "Hard delete"; must be one of ["Hard delete", "Soft delete"]
    credentials DestinationSnowflakeConfigurationCredentials
    Determines the type of authentication that should be used.
    disableTypeDedupe boolean
    Write the legacy "raw tables" format, to enable backwards compatibility with older versions of this connector.
    jdbcUrlParams string
    Enter the additional properties to pass to the JDBC URL string when connecting to the database (formatted as key=value pairs separated by the symbol &). Example: key1=value1&key2=value2&key3=value3
    rawDataSchema string
    Airbyte will use this dataset for various internal tables. In legacy raw tables mode, the raw tables will be stored in this dataset. Defaults to "airbyte_internal".
    retentionPeriodDays number
    The number of days of Snowflake Time Travel to enable on the tables. See Snowflake's documentation for more information. Setting a nonzero value will incur increased storage costs in your Snowflake instance.
    database str
    Enter the name of the database you want to sync data into
    host str
    Enter your Snowflake account's locator (in the format <account_locator>.<region>.<cloud>.snowflakecomputing.com)
    role str
    Enter the role that you want to use to access Snowflake
    schema str
    Enter the name of the default schema
    username str
    Enter the name of the user you want to use to access the database
    warehouse str
    Enter the name of the warehouse that you want to use as a compute cluster
    additional_properties str
    Parsed as JSON.
    cdc_deletion_mode str
    Whether to execute CDC deletions as hard deletes (i.e. propagate source deletions to the destination), or soft deletes (i.e. leave a tombstone record in the destination). Defaults to hard deletes. Default: "Hard delete"; must be one of ["Hard delete", "Soft delete"]
    credentials DestinationSnowflakeConfigurationCredentials
    Determines the type of authentication that should be used.
    disable_type_dedupe bool
    Write the legacy "raw tables" format, to enable backwards compatibility with older versions of this connector.
    jdbc_url_params str
    Enter the additional properties to pass to the JDBC URL string when connecting to the database (formatted as key=value pairs separated by the symbol &). Example: key1=value1&key2=value2&key3=value3
    raw_data_schema str
    Airbyte will use this dataset for various internal tables. In legacy raw tables mode, the raw tables will be stored in this dataset. Defaults to "airbyte_internal".
    retention_period_days float
    The number of days of Snowflake Time Travel to enable on the tables. See Snowflake's documentation for more information. Setting a nonzero value will incur increased storage costs in your Snowflake instance.
    database String
    Enter the name of the database you want to sync data into
    host String
    Enter your Snowflake account's locator (in the format <account_locator>.<region>.<cloud>.snowflakecomputing.com)
    role String
    Enter the role that you want to use to access Snowflake
    schema String
    Enter the name of the default schema
    username String
    Enter the name of the user you want to use to access the database
    warehouse String
    Enter the name of the warehouse that you want to use as a compute cluster
    additionalProperties String
    Parsed as JSON.
    cdcDeletionMode String
    Whether to execute CDC deletions as hard deletes (i.e. propagate source deletions to the destination), or soft deletes (i.e. leave a tombstone record in the destination). Defaults to hard deletes. Default: "Hard delete"; must be one of ["Hard delete", "Soft delete"]
    credentials Property Map
    Determines the type of authentication that should be used.
    disableTypeDedupe Boolean
    Write the legacy "raw tables" format, to enable backwards compatibility with older versions of this connector.
    jdbcUrlParams String
    Enter the additional properties to pass to the JDBC URL string when connecting to the database (formatted as key=value pairs separated by the symbol &). Example: key1=value1&key2=value2&key3=value3
    rawDataSchema String
    Airbyte will use this dataset for various internal tables. In legacy raw tables mode, the raw tables will be stored in this dataset. Defaults to "airbyte_internal".
    retentionPeriodDays Number
    The number of days of Snowflake Time Travel to enable on the tables. See Snowflake's documentation for more information. Setting a nonzero value will incur increased storage costs in your Snowflake instance.

    DestinationSnowflakeConfigurationCredentials, DestinationSnowflakeConfigurationCredentialsArgs

    KeyPairAuthentication DestinationSnowflakeConfigurationCredentialsKeyPairAuthentication
    Configuration details for the Key Pair Authentication.
    UsernameAndPassword DestinationSnowflakeConfigurationCredentialsUsernameAndPassword
    Configuration details for the Username and Password Authentication.
    KeyPairAuthentication DestinationSnowflakeConfigurationCredentialsKeyPairAuthentication
    Configuration details for the Key Pair Authentication.
    UsernameAndPassword DestinationSnowflakeConfigurationCredentialsUsernameAndPassword
    Configuration details for the Username and Password Authentication.
    keyPairAuthentication DestinationSnowflakeConfigurationCredentialsKeyPairAuthentication
    Configuration details for the Key Pair Authentication.
    usernameAndPassword DestinationSnowflakeConfigurationCredentialsUsernameAndPassword
    Configuration details for the Username and Password Authentication.
    keyPairAuthentication DestinationSnowflakeConfigurationCredentialsKeyPairAuthentication
    Configuration details for the Key Pair Authentication.
    usernameAndPassword DestinationSnowflakeConfigurationCredentialsUsernameAndPassword
    Configuration details for the Username and Password Authentication.
    key_pair_authentication DestinationSnowflakeConfigurationCredentialsKeyPairAuthentication
    Configuration details for the Key Pair Authentication.
    username_and_password DestinationSnowflakeConfigurationCredentialsUsernameAndPassword
    Configuration details for the Username and Password Authentication.
    keyPairAuthentication Property Map
    Configuration details for the Key Pair Authentication.
    usernameAndPassword Property Map
    Configuration details for the Username and Password Authentication.

    DestinationSnowflakeConfigurationCredentialsKeyPairAuthentication, DestinationSnowflakeConfigurationCredentialsKeyPairAuthenticationArgs

    PrivateKey string
    RSA Private key to use for Snowflake connection. See the docs for more information on how to obtain this key.
    AdditionalProperties string
    Parsed as JSON.
    AuthType string
    Default: "Key Pair Authentication"; must be "Key Pair Authentication"
    PrivateKeyPassword string
    Passphrase for private key
    PrivateKey string
    RSA Private key to use for Snowflake connection. See the docs for more information on how to obtain this key.
    AdditionalProperties string
    Parsed as JSON.
    AuthType string
    Default: "Key Pair Authentication"; must be "Key Pair Authentication"
    PrivateKeyPassword string
    Passphrase for private key
    privateKey String
    RSA Private key to use for Snowflake connection. See the docs for more information on how to obtain this key.
    additionalProperties String
    Parsed as JSON.
    authType String
    Default: "Key Pair Authentication"; must be "Key Pair Authentication"
    privateKeyPassword String
    Passphrase for private key
    privateKey string
    RSA Private key to use for Snowflake connection. See the docs for more information on how to obtain this key.
    additionalProperties string
    Parsed as JSON.
    authType string
    Default: "Key Pair Authentication"; must be "Key Pair Authentication"
    privateKeyPassword string
    Passphrase for private key
    private_key str
    RSA Private key to use for Snowflake connection. See the docs for more information on how to obtain this key.
    additional_properties str
    Parsed as JSON.
    auth_type str
    Default: "Key Pair Authentication"; must be "Key Pair Authentication"
    private_key_password str
    Passphrase for private key
    privateKey String
    RSA Private key to use for Snowflake connection. See the docs for more information on how to obtain this key.
    additionalProperties String
    Parsed as JSON.
    authType String
    Default: "Key Pair Authentication"; must be "Key Pair Authentication"
    privateKeyPassword String
    Passphrase for private key

    DestinationSnowflakeConfigurationCredentialsUsernameAndPassword, DestinationSnowflakeConfigurationCredentialsUsernameAndPasswordArgs

    Password string
    Enter the password associated with the username.
    AdditionalProperties string
    Parsed as JSON.
    AuthType string
    Default: "Username and Password"; must be "Username and Password"
    Password string
    Enter the password associated with the username.
    AdditionalProperties string
    Parsed as JSON.
    AuthType string
    Default: "Username and Password"; must be "Username and Password"
    password String
    Enter the password associated with the username.
    additionalProperties String
    Parsed as JSON.
    authType String
    Default: "Username and Password"; must be "Username and Password"
    password string
    Enter the password associated with the username.
    additionalProperties string
    Parsed as JSON.
    authType string
    Default: "Username and Password"; must be "Username and Password"
    password str
    Enter the password associated with the username.
    additional_properties str
    Parsed as JSON.
    auth_type str
    Default: "Username and Password"; must be "Username and Password"
    password String
    Enter the password associated with the username.
    additionalProperties String
    Parsed as JSON.
    authType String
    Default: "Username and Password"; must be "Username and Password"

    DestinationSnowflakeResourceAllocation, DestinationSnowflakeResourceAllocationArgs

    Default DestinationSnowflakeResourceAllocationDefault
    optional resource requirements to run workers (blank for unbounded allocations)
    JobSpecifics List<DestinationSnowflakeResourceAllocationJobSpecific>
    Default DestinationSnowflakeResourceAllocationDefault
    optional resource requirements to run workers (blank for unbounded allocations)
    JobSpecifics []DestinationSnowflakeResourceAllocationJobSpecific
    default_ DestinationSnowflakeResourceAllocationDefault
    optional resource requirements to run workers (blank for unbounded allocations)
    jobSpecifics List<DestinationSnowflakeResourceAllocationJobSpecific>
    default DestinationSnowflakeResourceAllocationDefault
    optional resource requirements to run workers (blank for unbounded allocations)
    jobSpecifics DestinationSnowflakeResourceAllocationJobSpecific[]
    default Property Map
    optional resource requirements to run workers (blank for unbounded allocations)
    jobSpecifics List<Property Map>

    DestinationSnowflakeResourceAllocationDefault, DestinationSnowflakeResourceAllocationDefaultArgs

    DestinationSnowflakeResourceAllocationJobSpecific, DestinationSnowflakeResourceAllocationJobSpecificArgs

    JobType string
    enum that describes the different types of jobs that the platform runs.
    ResourceRequirements DestinationSnowflakeResourceAllocationJobSpecificResourceRequirements
    optional resource requirements to run workers (blank for unbounded allocations)
    JobType string
    enum that describes the different types of jobs that the platform runs.
    ResourceRequirements DestinationSnowflakeResourceAllocationJobSpecificResourceRequirements
    optional resource requirements to run workers (blank for unbounded allocations)
    jobType String
    enum that describes the different types of jobs that the platform runs.
    resourceRequirements DestinationSnowflakeResourceAllocationJobSpecificResourceRequirements
    optional resource requirements to run workers (blank for unbounded allocations)
    jobType string
    enum that describes the different types of jobs that the platform runs.
    resourceRequirements DestinationSnowflakeResourceAllocationJobSpecificResourceRequirements
    optional resource requirements to run workers (blank for unbounded allocations)
    job_type str
    enum that describes the different types of jobs that the platform runs.
    resource_requirements DestinationSnowflakeResourceAllocationJobSpecificResourceRequirements
    optional resource requirements to run workers (blank for unbounded allocations)
    jobType String
    enum that describes the different types of jobs that the platform runs.
    resourceRequirements Property Map
    optional resource requirements to run workers (blank for unbounded allocations)

    DestinationSnowflakeResourceAllocationJobSpecificResourceRequirements, DestinationSnowflakeResourceAllocationJobSpecificResourceRequirementsArgs

    Import

    In Terraform v1.5.0 and later, the import block can be used with the id attribute, for example:

    terraform

    import {

    to = airbyte_destination_snowflake.my_airbyte_destination_snowflake

    id = "..."

    }

    The pulumi import command can be used, for example:

    $ pulumi import airbyte:index/destinationSnowflake:DestinationSnowflake my_airbyte_destination_snowflake "..."
    

    To learn more about importing existing cloud resources, see Importing resources.

    Package Details

    Repository
    airbyte airbytehq/terraform-provider-airbyte
    License
    Notes
    This Pulumi package is based on the airbyte Terraform Provider.
    airbyte logo
    airbyte 1.0.0-rc6 published on Monday, Feb 16, 2026 by airbytehq
      Meet Neo: Your AI Platform Teammate