DestinationSnowflake Resource
Example Usage
import * as pulumi from "@pulumi/pulumi";
import * as airbyte from "@pulumi/airbyte";
const myDestinationSnowflake = new airbyte.DestinationSnowflake("my_destination_snowflake", {
configuration: {
additionalProperties: "{ \"see\": \"documentation\" }",
cdcDeletionMode: "Soft delete",
credentials: {
keyPairAuthentication: {
additionalProperties: "{ \"see\": \"documentation\" }",
authType: "Key Pair Authentication",
privateKey: "...my_private_key...",
privateKeyPassword: "...my_private_key_password...",
},
},
database: "AIRBYTE_DATABASE",
disableTypeDedupe: true,
host: "accountname.us-east-2.aws.snowflakecomputing.com",
jdbcUrlParams: "...my_jdbc_url_params...",
rawDataSchema: "...my_raw_data_schema...",
retentionPeriodDays: 9,
role: "AIRBYTE_ROLE",
schema: "AIRBYTE_SCHEMA",
username: "AIRBYTE_USER",
warehouse: "AIRBYTE_WAREHOUSE",
},
definitionId: "fce231ce-04a4-46ec-a244-d1436db0281f",
name: "...my_name...",
workspaceId: "058d9730-38a6-485c-8631-dc0cc86125f9",
});
import pulumi
import pulumi_airbyte as airbyte
my_destination_snowflake = airbyte.DestinationSnowflake("my_destination_snowflake",
configuration={
"additional_properties": "{ \"see\": \"documentation\" }",
"cdc_deletion_mode": "Soft delete",
"credentials": {
"key_pair_authentication": {
"additional_properties": "{ \"see\": \"documentation\" }",
"auth_type": "Key Pair Authentication",
"private_key": "...my_private_key...",
"private_key_password": "...my_private_key_password...",
},
},
"database": "AIRBYTE_DATABASE",
"disable_type_dedupe": True,
"host": "accountname.us-east-2.aws.snowflakecomputing.com",
"jdbc_url_params": "...my_jdbc_url_params...",
"raw_data_schema": "...my_raw_data_schema...",
"retention_period_days": 9,
"role": "AIRBYTE_ROLE",
"schema": "AIRBYTE_SCHEMA",
"username": "AIRBYTE_USER",
"warehouse": "AIRBYTE_WAREHOUSE",
},
definition_id="fce231ce-04a4-46ec-a244-d1436db0281f",
name="...my_name...",
workspace_id="058d9730-38a6-485c-8631-dc0cc86125f9")
package main
import (
"github.com/pulumi/pulumi-terraform-provider/sdks/go/airbyte/airbyte"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
_, err := airbyte.NewDestinationSnowflake(ctx, "my_destination_snowflake", &airbyte.DestinationSnowflakeArgs{
Configuration: &airbyte.DestinationSnowflakeConfigurationArgs{
AdditionalProperties: pulumi.String("{ \"see\": \"documentation\" }"),
CdcDeletionMode: pulumi.String("Soft delete"),
Credentials: &airbyte.DestinationSnowflakeConfigurationCredentialsArgs{
KeyPairAuthentication: &airbyte.DestinationSnowflakeConfigurationCredentialsKeyPairAuthenticationArgs{
AdditionalProperties: pulumi.String("{ \"see\": \"documentation\" }"),
AuthType: pulumi.String("Key Pair Authentication"),
PrivateKey: pulumi.String("...my_private_key..."),
PrivateKeyPassword: pulumi.String("...my_private_key_password..."),
},
},
Database: pulumi.String("AIRBYTE_DATABASE"),
DisableTypeDedupe: pulumi.Bool(true),
Host: pulumi.String("accountname.us-east-2.aws.snowflakecomputing.com"),
JdbcUrlParams: pulumi.String("...my_jdbc_url_params..."),
RawDataSchema: pulumi.String("...my_raw_data_schema..."),
RetentionPeriodDays: pulumi.Float64(9),
Role: pulumi.String("AIRBYTE_ROLE"),
Schema: pulumi.String("AIRBYTE_SCHEMA"),
Username: pulumi.String("AIRBYTE_USER"),
Warehouse: pulumi.String("AIRBYTE_WAREHOUSE"),
},
DefinitionId: pulumi.String("fce231ce-04a4-46ec-a244-d1436db0281f"),
Name: pulumi.String("...my_name..."),
WorkspaceId: pulumi.String("058d9730-38a6-485c-8631-dc0cc86125f9"),
})
if err != nil {
return err
}
return nil
})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Airbyte = Pulumi.Airbyte;
return await Deployment.RunAsync(() =>
{
var myDestinationSnowflake = new Airbyte.DestinationSnowflake("my_destination_snowflake", new()
{
Configuration = new Airbyte.Inputs.DestinationSnowflakeConfigurationArgs
{
AdditionalProperties = "{ \"see\": \"documentation\" }",
CdcDeletionMode = "Soft delete",
Credentials = new Airbyte.Inputs.DestinationSnowflakeConfigurationCredentialsArgs
{
KeyPairAuthentication = new Airbyte.Inputs.DestinationSnowflakeConfigurationCredentialsKeyPairAuthenticationArgs
{
AdditionalProperties = "{ \"see\": \"documentation\" }",
AuthType = "Key Pair Authentication",
PrivateKey = "...my_private_key...",
PrivateKeyPassword = "...my_private_key_password...",
},
},
Database = "AIRBYTE_DATABASE",
DisableTypeDedupe = true,
Host = "accountname.us-east-2.aws.snowflakecomputing.com",
JdbcUrlParams = "...my_jdbc_url_params...",
RawDataSchema = "...my_raw_data_schema...",
RetentionPeriodDays = 9,
Role = "AIRBYTE_ROLE",
Schema = "AIRBYTE_SCHEMA",
Username = "AIRBYTE_USER",
Warehouse = "AIRBYTE_WAREHOUSE",
},
DefinitionId = "fce231ce-04a4-46ec-a244-d1436db0281f",
Name = "...my_name...",
WorkspaceId = "058d9730-38a6-485c-8631-dc0cc86125f9",
});
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.airbyte.DestinationSnowflake;
import com.pulumi.airbyte.DestinationSnowflakeArgs;
import com.pulumi.airbyte.inputs.DestinationSnowflakeConfigurationArgs;
import com.pulumi.airbyte.inputs.DestinationSnowflakeConfigurationCredentialsArgs;
import com.pulumi.airbyte.inputs.DestinationSnowflakeConfigurationCredentialsKeyPairAuthenticationArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var myDestinationSnowflake = new DestinationSnowflake("myDestinationSnowflake", DestinationSnowflakeArgs.builder()
.configuration(DestinationSnowflakeConfigurationArgs.builder()
.additionalProperties("{ \"see\": \"documentation\" }")
.cdcDeletionMode("Soft delete")
.credentials(DestinationSnowflakeConfigurationCredentialsArgs.builder()
.keyPairAuthentication(DestinationSnowflakeConfigurationCredentialsKeyPairAuthenticationArgs.builder()
.additionalProperties("{ \"see\": \"documentation\" }")
.authType("Key Pair Authentication")
.privateKey("...my_private_key...")
.privateKeyPassword("...my_private_key_password...")
.build())
.build())
.database("AIRBYTE_DATABASE")
.disableTypeDedupe(true)
.host("accountname.us-east-2.aws.snowflakecomputing.com")
.jdbcUrlParams("...my_jdbc_url_params...")
.rawDataSchema("...my_raw_data_schema...")
.retentionPeriodDays(9.0)
.role("AIRBYTE_ROLE")
.schema("AIRBYTE_SCHEMA")
.username("AIRBYTE_USER")
.warehouse("AIRBYTE_WAREHOUSE")
.build())
.definitionId("fce231ce-04a4-46ec-a244-d1436db0281f")
.name("...my_name...")
.workspaceId("058d9730-38a6-485c-8631-dc0cc86125f9")
.build());
}
}
resources:
myDestinationSnowflake:
type: airbyte:DestinationSnowflake
name: my_destination_snowflake
properties:
configuration:
additionalProperties: '{ "see": "documentation" }'
cdcDeletionMode: Soft delete
credentials:
keyPairAuthentication:
additionalProperties: '{ "see": "documentation" }'
authType: Key Pair Authentication
privateKey: '...my_private_key...'
privateKeyPassword: '...my_private_key_password...'
database: AIRBYTE_DATABASE
disableTypeDedupe: true
host: accountname.us-east-2.aws.snowflakecomputing.com
jdbcUrlParams: '...my_jdbc_url_params...'
rawDataSchema: '...my_raw_data_schema...'
retentionPeriodDays: 9
role: AIRBYTE_ROLE
schema: AIRBYTE_SCHEMA
username: AIRBYTE_USER
warehouse: AIRBYTE_WAREHOUSE
definitionId: fce231ce-04a4-46ec-a244-d1436db0281f
name: '...my_name...'
workspaceId: 058d9730-38a6-485c-8631-dc0cc86125f9
Create DestinationSnowflake Resource
Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.
Constructor syntax
new DestinationSnowflake(name: string, args: DestinationSnowflakeArgs, opts?: CustomResourceOptions);@overload
def DestinationSnowflake(resource_name: str,
args: DestinationSnowflakeArgs,
opts: Optional[ResourceOptions] = None)
@overload
def DestinationSnowflake(resource_name: str,
opts: Optional[ResourceOptions] = None,
configuration: Optional[DestinationSnowflakeConfigurationArgs] = None,
workspace_id: Optional[str] = None,
definition_id: Optional[str] = None,
name: Optional[str] = None)func NewDestinationSnowflake(ctx *Context, name string, args DestinationSnowflakeArgs, opts ...ResourceOption) (*DestinationSnowflake, error)public DestinationSnowflake(string name, DestinationSnowflakeArgs args, CustomResourceOptions? opts = null)
public DestinationSnowflake(String name, DestinationSnowflakeArgs args)
public DestinationSnowflake(String name, DestinationSnowflakeArgs args, CustomResourceOptions options)
type: airbyte:DestinationSnowflake
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
Parameters
- name string
- The unique name of the resource.
- args DestinationSnowflakeArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- resource_name str
- The unique name of the resource.
- args DestinationSnowflakeArgs
- The arguments to resource properties.
- opts ResourceOptions
- Bag of options to control resource's behavior.
- ctx Context
- Context object for the current deployment.
- name string
- The unique name of the resource.
- args DestinationSnowflakeArgs
- The arguments to resource properties.
- opts ResourceOption
- Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args DestinationSnowflakeArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- name String
- The unique name of the resource.
- args DestinationSnowflakeArgs
- The arguments to resource properties.
- options CustomResourceOptions
- Bag of options to control resource's behavior.
Constructor example
The following reference example uses placeholder values for all input properties.
var destinationSnowflakeResource = new Airbyte.DestinationSnowflake("destinationSnowflakeResource", new()
{
Configuration = new Airbyte.Inputs.DestinationSnowflakeConfigurationArgs
{
Role = "string",
Database = "string",
Host = "string",
Schema = "string",
Username = "string",
Warehouse = "string",
CdcDeletionMode = "string",
Credentials = new Airbyte.Inputs.DestinationSnowflakeConfigurationCredentialsArgs
{
KeyPairAuthentication = new Airbyte.Inputs.DestinationSnowflakeConfigurationCredentialsKeyPairAuthenticationArgs
{
PrivateKey = "string",
AdditionalProperties = "string",
AuthType = "string",
PrivateKeyPassword = "string",
},
UsernameAndPassword = new Airbyte.Inputs.DestinationSnowflakeConfigurationCredentialsUsernameAndPasswordArgs
{
Password = "string",
AdditionalProperties = "string",
AuthType = "string",
},
},
DisableTypeDedupe = false,
JdbcUrlParams = "string",
RawDataSchema = "string",
RetentionPeriodDays = 0,
AdditionalProperties = "string",
},
WorkspaceId = "string",
DefinitionId = "string",
Name = "string",
});
example, err := airbyte.NewDestinationSnowflake(ctx, "destinationSnowflakeResource", &airbyte.DestinationSnowflakeArgs{
Configuration: &airbyte.DestinationSnowflakeConfigurationArgs{
Role: pulumi.String("string"),
Database: pulumi.String("string"),
Host: pulumi.String("string"),
Schema: pulumi.String("string"),
Username: pulumi.String("string"),
Warehouse: pulumi.String("string"),
CdcDeletionMode: pulumi.String("string"),
Credentials: &airbyte.DestinationSnowflakeConfigurationCredentialsArgs{
KeyPairAuthentication: &airbyte.DestinationSnowflakeConfigurationCredentialsKeyPairAuthenticationArgs{
PrivateKey: pulumi.String("string"),
AdditionalProperties: pulumi.String("string"),
AuthType: pulumi.String("string"),
PrivateKeyPassword: pulumi.String("string"),
},
UsernameAndPassword: &airbyte.DestinationSnowflakeConfigurationCredentialsUsernameAndPasswordArgs{
Password: pulumi.String("string"),
AdditionalProperties: pulumi.String("string"),
AuthType: pulumi.String("string"),
},
},
DisableTypeDedupe: pulumi.Bool(false),
JdbcUrlParams: pulumi.String("string"),
RawDataSchema: pulumi.String("string"),
RetentionPeriodDays: pulumi.Float64(0),
AdditionalProperties: pulumi.String("string"),
},
WorkspaceId: pulumi.String("string"),
DefinitionId: pulumi.String("string"),
Name: pulumi.String("string"),
})
var destinationSnowflakeResource = new DestinationSnowflake("destinationSnowflakeResource", DestinationSnowflakeArgs.builder()
.configuration(DestinationSnowflakeConfigurationArgs.builder()
.role("string")
.database("string")
.host("string")
.schema("string")
.username("string")
.warehouse("string")
.cdcDeletionMode("string")
.credentials(DestinationSnowflakeConfigurationCredentialsArgs.builder()
.keyPairAuthentication(DestinationSnowflakeConfigurationCredentialsKeyPairAuthenticationArgs.builder()
.privateKey("string")
.additionalProperties("string")
.authType("string")
.privateKeyPassword("string")
.build())
.usernameAndPassword(DestinationSnowflakeConfigurationCredentialsUsernameAndPasswordArgs.builder()
.password("string")
.additionalProperties("string")
.authType("string")
.build())
.build())
.disableTypeDedupe(false)
.jdbcUrlParams("string")
.rawDataSchema("string")
.retentionPeriodDays(0.0)
.additionalProperties("string")
.build())
.workspaceId("string")
.definitionId("string")
.name("string")
.build());
destination_snowflake_resource = airbyte.DestinationSnowflake("destinationSnowflakeResource",
configuration={
"role": "string",
"database": "string",
"host": "string",
"schema": "string",
"username": "string",
"warehouse": "string",
"cdc_deletion_mode": "string",
"credentials": {
"key_pair_authentication": {
"private_key": "string",
"additional_properties": "string",
"auth_type": "string",
"private_key_password": "string",
},
"username_and_password": {
"password": "string",
"additional_properties": "string",
"auth_type": "string",
},
},
"disable_type_dedupe": False,
"jdbc_url_params": "string",
"raw_data_schema": "string",
"retention_period_days": 0,
"additional_properties": "string",
},
workspace_id="string",
definition_id="string",
name="string")
const destinationSnowflakeResource = new airbyte.DestinationSnowflake("destinationSnowflakeResource", {
configuration: {
role: "string",
database: "string",
host: "string",
schema: "string",
username: "string",
warehouse: "string",
cdcDeletionMode: "string",
credentials: {
keyPairAuthentication: {
privateKey: "string",
additionalProperties: "string",
authType: "string",
privateKeyPassword: "string",
},
usernameAndPassword: {
password: "string",
additionalProperties: "string",
authType: "string",
},
},
disableTypeDedupe: false,
jdbcUrlParams: "string",
rawDataSchema: "string",
retentionPeriodDays: 0,
additionalProperties: "string",
},
workspaceId: "string",
definitionId: "string",
name: "string",
});
type: airbyte:DestinationSnowflake
properties:
configuration:
additionalProperties: string
cdcDeletionMode: string
credentials:
keyPairAuthentication:
additionalProperties: string
authType: string
privateKey: string
privateKeyPassword: string
usernameAndPassword:
additionalProperties: string
authType: string
password: string
database: string
disableTypeDedupe: false
host: string
jdbcUrlParams: string
rawDataSchema: string
retentionPeriodDays: 0
role: string
schema: string
username: string
warehouse: string
definitionId: string
name: string
workspaceId: string
DestinationSnowflake Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
The DestinationSnowflake resource accepts the following input properties:
- Configuration
Destination
Snowflake Configuration - The values required to configure the destination. The schema for this must match the schema returned by destinationdefinitionspecifications/get for the destinationDefinition.
- Workspace
Id string - Definition
Id string - The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Default: "424892c4-daac-4491-b35d-c6688ba547ba"; Requires replacement if changed.
- Name string
- Name of the destination e.g. dev-mysql-instance.
- Configuration
Destination
Snowflake Configuration Args - The values required to configure the destination. The schema for this must match the schema returned by destinationdefinitionspecifications/get for the destinationDefinition.
- Workspace
Id string - Definition
Id string - The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Default: "424892c4-daac-4491-b35d-c6688ba547ba"; Requires replacement if changed.
- Name string
- Name of the destination e.g. dev-mysql-instance.
- configuration
Destination
Snowflake Configuration - The values required to configure the destination. The schema for this must match the schema returned by destinationdefinitionspecifications/get for the destinationDefinition.
- workspace
Id String - definition
Id String - The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Default: "424892c4-daac-4491-b35d-c6688ba547ba"; Requires replacement if changed.
- name String
- Name of the destination e.g. dev-mysql-instance.
- configuration
Destination
Snowflake Configuration - The values required to configure the destination. The schema for this must match the schema returned by destinationdefinitionspecifications/get for the destinationDefinition.
- workspace
Id string - definition
Id string - The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Default: "424892c4-daac-4491-b35d-c6688ba547ba"; Requires replacement if changed.
- name string
- Name of the destination e.g. dev-mysql-instance.
- configuration
Destination
Snowflake Configuration Args - The values required to configure the destination. The schema for this must match the schema returned by destinationdefinitionspecifications/get for the destinationDefinition.
- workspace_
id str - definition_
id str - The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Default: "424892c4-daac-4491-b35d-c6688ba547ba"; Requires replacement if changed.
- name str
- Name of the destination e.g. dev-mysql-instance.
- configuration Property Map
- The values required to configure the destination. The schema for this must match the schema returned by destinationdefinitionspecifications/get for the destinationDefinition.
- workspace
Id String - definition
Id String - The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Default: "424892c4-daac-4491-b35d-c6688ba547ba"; Requires replacement if changed.
- name String
- Name of the destination e.g. dev-mysql-instance.
Outputs
All input properties are implicitly available as output properties. Additionally, the DestinationSnowflake resource produces the following output properties:
- Created
At double - Destination
Id string - Destination
Type string - Id string
- The provider-assigned unique ID for this managed resource.
- Resource
Allocation DestinationSnowflake Resource Allocation - actor or actor definition specific resource requirements. if default is set, these are the requirements that should be set for ALL jobs run for this actor definition. it is overridden by the job type specific configurations. if not set, the platform will use defaults. these values will be overridden by configuration at the connection level.
- Created
At float64 - Destination
Id string - Destination
Type string - Id string
- The provider-assigned unique ID for this managed resource.
- Resource
Allocation DestinationSnowflake Resource Allocation - actor or actor definition specific resource requirements. if default is set, these are the requirements that should be set for ALL jobs run for this actor definition. it is overridden by the job type specific configurations. if not set, the platform will use defaults. these values will be overridden by configuration at the connection level.
- created
At Double - destination
Id String - destination
Type String - id String
- The provider-assigned unique ID for this managed resource.
- resource
Allocation DestinationSnowflake Resource Allocation - actor or actor definition specific resource requirements. if default is set, these are the requirements that should be set for ALL jobs run for this actor definition. it is overridden by the job type specific configurations. if not set, the platform will use defaults. these values will be overridden by configuration at the connection level.
- created
At number - destination
Id string - destination
Type string - id string
- The provider-assigned unique ID for this managed resource.
- resource
Allocation DestinationSnowflake Resource Allocation - actor or actor definition specific resource requirements. if default is set, these are the requirements that should be set for ALL jobs run for this actor definition. it is overridden by the job type specific configurations. if not set, the platform will use defaults. these values will be overridden by configuration at the connection level.
- created_
at float - destination_
id str - destination_
type str - id str
- The provider-assigned unique ID for this managed resource.
- resource_
allocation DestinationSnowflake Resource Allocation - actor or actor definition specific resource requirements. if default is set, these are the requirements that should be set for ALL jobs run for this actor definition. it is overridden by the job type specific configurations. if not set, the platform will use defaults. these values will be overridden by configuration at the connection level.
- created
At Number - destination
Id String - destination
Type String - id String
- The provider-assigned unique ID for this managed resource.
- resource
Allocation Property Map - actor or actor definition specific resource requirements. if default is set, these are the requirements that should be set for ALL jobs run for this actor definition. it is overridden by the job type specific configurations. if not set, the platform will use defaults. these values will be overridden by configuration at the connection level.
Look up Existing DestinationSnowflake Resource
Get an existing DestinationSnowflake resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: DestinationSnowflakeState, opts?: CustomResourceOptions): DestinationSnowflake@staticmethod
def get(resource_name: str,
id: str,
opts: Optional[ResourceOptions] = None,
configuration: Optional[DestinationSnowflakeConfigurationArgs] = None,
created_at: Optional[float] = None,
definition_id: Optional[str] = None,
destination_id: Optional[str] = None,
destination_type: Optional[str] = None,
name: Optional[str] = None,
resource_allocation: Optional[DestinationSnowflakeResourceAllocationArgs] = None,
workspace_id: Optional[str] = None) -> DestinationSnowflakefunc GetDestinationSnowflake(ctx *Context, name string, id IDInput, state *DestinationSnowflakeState, opts ...ResourceOption) (*DestinationSnowflake, error)public static DestinationSnowflake Get(string name, Input<string> id, DestinationSnowflakeState? state, CustomResourceOptions? opts = null)public static DestinationSnowflake get(String name, Output<String> id, DestinationSnowflakeState state, CustomResourceOptions options)resources: _: type: airbyte:DestinationSnowflake get: id: ${id}- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- resource_name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- Configuration
Destination
Snowflake Configuration - The values required to configure the destination. The schema for this must match the schema returned by destinationdefinitionspecifications/get for the destinationDefinition.
- Created
At double - Definition
Id string - The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Default: "424892c4-daac-4491-b35d-c6688ba547ba"; Requires replacement if changed.
- Destination
Id string - Destination
Type string - Name string
- Name of the destination e.g. dev-mysql-instance.
- Resource
Allocation DestinationSnowflake Resource Allocation - actor or actor definition specific resource requirements. if default is set, these are the requirements that should be set for ALL jobs run for this actor definition. it is overridden by the job type specific configurations. if not set, the platform will use defaults. these values will be overridden by configuration at the connection level.
- Workspace
Id string
- Configuration
Destination
Snowflake Configuration Args - The values required to configure the destination. The schema for this must match the schema returned by destinationdefinitionspecifications/get for the destinationDefinition.
- Created
At float64 - Definition
Id string - The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Default: "424892c4-daac-4491-b35d-c6688ba547ba"; Requires replacement if changed.
- Destination
Id string - Destination
Type string - Name string
- Name of the destination e.g. dev-mysql-instance.
- Resource
Allocation DestinationSnowflake Resource Allocation Args - actor or actor definition specific resource requirements. if default is set, these are the requirements that should be set for ALL jobs run for this actor definition. it is overridden by the job type specific configurations. if not set, the platform will use defaults. these values will be overridden by configuration at the connection level.
- Workspace
Id string
- configuration
Destination
Snowflake Configuration - The values required to configure the destination. The schema for this must match the schema returned by destinationdefinitionspecifications/get for the destinationDefinition.
- created
At Double - definition
Id String - The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Default: "424892c4-daac-4491-b35d-c6688ba547ba"; Requires replacement if changed.
- destination
Id String - destination
Type String - name String
- Name of the destination e.g. dev-mysql-instance.
- resource
Allocation DestinationSnowflake Resource Allocation - actor or actor definition specific resource requirements. if default is set, these are the requirements that should be set for ALL jobs run for this actor definition. it is overridden by the job type specific configurations. if not set, the platform will use defaults. these values will be overridden by configuration at the connection level.
- workspace
Id String
- configuration
Destination
Snowflake Configuration - The values required to configure the destination. The schema for this must match the schema returned by destinationdefinitionspecifications/get for the destinationDefinition.
- created
At number - definition
Id string - The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Default: "424892c4-daac-4491-b35d-c6688ba547ba"; Requires replacement if changed.
- destination
Id string - destination
Type string - name string
- Name of the destination e.g. dev-mysql-instance.
- resource
Allocation DestinationSnowflake Resource Allocation - actor or actor definition specific resource requirements. if default is set, these are the requirements that should be set for ALL jobs run for this actor definition. it is overridden by the job type specific configurations. if not set, the platform will use defaults. these values will be overridden by configuration at the connection level.
- workspace
Id string
- configuration
Destination
Snowflake Configuration Args - The values required to configure the destination. The schema for this must match the schema returned by destinationdefinitionspecifications/get for the destinationDefinition.
- created_
at float - definition_
id str - The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Default: "424892c4-daac-4491-b35d-c6688ba547ba"; Requires replacement if changed.
- destination_
id str - destination_
type str - name str
- Name of the destination e.g. dev-mysql-instance.
- resource_
allocation DestinationSnowflake Resource Allocation Args - actor or actor definition specific resource requirements. if default is set, these are the requirements that should be set for ALL jobs run for this actor definition. it is overridden by the job type specific configurations. if not set, the platform will use defaults. these values will be overridden by configuration at the connection level.
- workspace_
id str
- configuration Property Map
- The values required to configure the destination. The schema for this must match the schema returned by destinationdefinitionspecifications/get for the destinationDefinition.
- created
At Number - definition
Id String - The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Default: "424892c4-daac-4491-b35d-c6688ba547ba"; Requires replacement if changed.
- destination
Id String - destination
Type String - name String
- Name of the destination e.g. dev-mysql-instance.
- resource
Allocation Property Map - actor or actor definition specific resource requirements. if default is set, these are the requirements that should be set for ALL jobs run for this actor definition. it is overridden by the job type specific configurations. if not set, the platform will use defaults. these values will be overridden by configuration at the connection level.
- workspace
Id String
Supporting Types
DestinationSnowflakeConfiguration, DestinationSnowflakeConfigurationArgs
- Database string
- Enter the name of the database you want to sync data into
- Host string
- Enter your Snowflake account's locator (in the format accountname.region.cloud.snowflakecomputing.com)
- Role string
- Enter the role that you want to use to access Snowflake
- Schema string
- Enter the name of the default schema
- Username string
- Enter the name of the user you want to use to access the database
- Warehouse string
- Enter the name of the warehouse that you want to use as a compute cluster
- Additional Properties string
- Parsed as JSON.
- Cdc Deletion Mode string
- Whether to execute CDC deletions as hard deletes (i.e. propagate source deletions to the destination), or soft deletes (i.e. leave a tombstone record in the destination). Defaults to hard deletes. Default: "Hard delete"; must be one of ["Hard delete", "Soft delete"]
- Credentials Destination Snowflake Configuration Credentials
- Determines the type of authentication that should be used.
- Disable Type Dedupe bool
- Write the legacy "raw tables" format, to enable backwards compatibility with older versions of this connector.
- Jdbc Url Params string
- Enter the additional properties to pass to the JDBC URL string when connecting to the database (formatted as key=value pairs separated by the symbol &). Example: key1=value1&key2=value2&key3=value3
- Raw Data Schema string
- Airbyte will use this dataset for various internal tables. In legacy raw tables mode, the raw tables will be stored in this dataset. Defaults to "airbyte_internal".
- Retention Period Days double
- The number of days of Snowflake Time Travel to enable on the tables. See Snowflake's documentation for more information. Setting a nonzero value will incur increased storage costs in your Snowflake instance.
- Database string
- Enter the name of the database you want to sync data into
- Host string
- Enter your Snowflake account's locator (in the format accountname.region.cloud.snowflakecomputing.com)
- Role string
- Enter the role that you want to use to access Snowflake
- Schema string
- Enter the name of the default schema
- Username string
- Enter the name of the user you want to use to access the database
- Warehouse string
- Enter the name of the warehouse that you want to use as a compute cluster
- Additional Properties string
- Parsed as JSON.
- Cdc Deletion Mode string
- Whether to execute CDC deletions as hard deletes (i.e. propagate source deletions to the destination), or soft deletes (i.e. leave a tombstone record in the destination). Defaults to hard deletes. Default: "Hard delete"; must be one of ["Hard delete", "Soft delete"]
- Credentials Destination Snowflake Configuration Credentials
- Determines the type of authentication that should be used.
- Disable Type Dedupe bool
- Write the legacy "raw tables" format, to enable backwards compatibility with older versions of this connector.
- Jdbc Url Params string
- Enter the additional properties to pass to the JDBC URL string when connecting to the database (formatted as key=value pairs separated by the symbol &). Example: key1=value1&key2=value2&key3=value3
- Raw Data Schema string
- Airbyte will use this dataset for various internal tables. In legacy raw tables mode, the raw tables will be stored in this dataset. Defaults to "airbyte_internal".
- Retention Period Days float64
- The number of days of Snowflake Time Travel to enable on the tables. See Snowflake's documentation for more information. Setting a nonzero value will incur increased storage costs in your Snowflake instance.
- database String
- Enter the name of the database you want to sync data into
- host String
- Enter your Snowflake account's locator (in the format accountname.region.cloud.snowflakecomputing.com)
- role String
- Enter the role that you want to use to access Snowflake
- schema String
- Enter the name of the default schema
- username String
- Enter the name of the user you want to use to access the database
- warehouse String
- Enter the name of the warehouse that you want to use as a compute cluster
- additional Properties String
- Parsed as JSON.
- cdc Deletion Mode String
- Whether to execute CDC deletions as hard deletes (i.e. propagate source deletions to the destination), or soft deletes (i.e. leave a tombstone record in the destination). Defaults to hard deletes. Default: "Hard delete"; must be one of ["Hard delete", "Soft delete"]
- credentials Destination Snowflake Configuration Credentials
- Determines the type of authentication that should be used.
- disable Type Dedupe Boolean
- Write the legacy "raw tables" format, to enable backwards compatibility with older versions of this connector.
- jdbc Url Params String
- Enter the additional properties to pass to the JDBC URL string when connecting to the database (formatted as key=value pairs separated by the symbol &). Example: key1=value1&key2=value2&key3=value3
- raw Data Schema String
- Airbyte will use this dataset for various internal tables. In legacy raw tables mode, the raw tables will be stored in this dataset. Defaults to "airbyte_internal".
- retention Period Days Double
- The number of days of Snowflake Time Travel to enable on the tables. See Snowflake's documentation for more information. Setting a nonzero value will incur increased storage costs in your Snowflake instance.
- database string
- Enter the name of the database you want to sync data into
- host string
- Enter your Snowflake account's locator (in the format accountname.region.cloud.snowflakecomputing.com)
- role string
- Enter the role that you want to use to access Snowflake
- schema string
- Enter the name of the default schema
- username string
- Enter the name of the user you want to use to access the database
- warehouse string
- Enter the name of the warehouse that you want to use as a compute cluster
- additional Properties string
- Parsed as JSON.
- cdc Deletion Mode string
- Whether to execute CDC deletions as hard deletes (i.e. propagate source deletions to the destination), or soft deletes (i.e. leave a tombstone record in the destination). Defaults to hard deletes. Default: "Hard delete"; must be one of ["Hard delete", "Soft delete"]
- credentials Destination Snowflake Configuration Credentials
- Determines the type of authentication that should be used.
- disable Type Dedupe boolean
- Write the legacy "raw tables" format, to enable backwards compatibility with older versions of this connector.
- jdbc Url Params string
- Enter the additional properties to pass to the JDBC URL string when connecting to the database (formatted as key=value pairs separated by the symbol &). Example: key1=value1&key2=value2&key3=value3
- raw Data Schema string
- Airbyte will use this dataset for various internal tables. In legacy raw tables mode, the raw tables will be stored in this dataset. Defaults to "airbyteInternal".
- retention Period Days number
- The number of days of Snowflake Time Travel to enable on the tables. See Snowflake's documentation for more information. Setting a nonzero value will incur increased storage costs in your Snowflake instance.
- database str
- Enter the name of the database you want to sync data into
- host str
- Enter your Snowflake account's locator (in the format accountname.region.cloud.snowflakecomputing.com)
- role str
- Enter the role that you want to use to access Snowflake
- schema str
- Enter the name of the default schema
- username str
- Enter the name of the user you want to use to access the database
- warehouse str
- Enter the name of the warehouse that you want to use as a compute cluster
- additional_properties str
- Parsed as JSON.
- cdc_deletion_mode str
- Whether to execute CDC deletions as hard deletes (i.e. propagate source deletions to the destination), or soft deletes (i.e. leave a tombstone record in the destination). Defaults to hard deletes. Default: "Hard delete"; must be one of ["Hard delete", "Soft delete"]
- credentials Destination Snowflake Configuration Credentials
- Determines the type of authentication that should be used.
- disable_type_dedupe bool
- Write the legacy "raw tables" format, to enable backwards compatibility with older versions of this connector.
- jdbc_url_params str
- Enter the additional properties to pass to the JDBC URL string when connecting to the database (formatted as key=value pairs separated by the symbol &). Example: key1=value1&key2=value2&key3=value3
- raw_data_schema str
- Airbyte will use this dataset for various internal tables. In legacy raw tables mode, the raw tables will be stored in this dataset. Defaults to "airbyte_internal".
- retention_period_days float
- The number of days of Snowflake Time Travel to enable on the tables. See Snowflake's documentation for more information. Setting a nonzero value will incur increased storage costs in your Snowflake instance.
- database String
- Enter the name of the database you want to sync data into
- host String
- Enter your Snowflake account's locator (in the format accountname.region.cloud.snowflakecomputing.com)
- role String
- Enter the role that you want to use to access Snowflake
- schema String
- Enter the name of the default schema
- username String
- Enter the name of the user you want to use to access the database
- warehouse String
- Enter the name of the warehouse that you want to use as a compute cluster
- additional Properties String
- Parsed as JSON.
- cdc Deletion Mode String
- Whether to execute CDC deletions as hard deletes (i.e. propagate source deletions to the destination), or soft deletes (i.e. leave a tombstone record in the destination). Defaults to hard deletes. Default: "Hard delete"; must be one of ["Hard delete", "Soft delete"]
- credentials Property Map
- Determines the type of authentication that should be used.
- disable Type Dedupe Boolean
- Write the legacy "raw tables" format, to enable backwards compatibility with older versions of this connector.
- jdbc Url Params String
- Enter the additional properties to pass to the JDBC URL string when connecting to the database (formatted as key=value pairs separated by the symbol &). Example: key1=value1&key2=value2&key3=value3
- raw Data Schema String
- Airbyte will use this dataset for various internal tables. In legacy raw tables mode, the raw tables will be stored in this dataset. Defaults to "airbyteInternal".
- retention Period Days Number
- The number of days of Snowflake Time Travel to enable on the tables. See Snowflake's documentation for more information. Setting a nonzero value will incur increased storage costs in your Snowflake instance.
DestinationSnowflakeConfigurationCredentials, DestinationSnowflakeConfigurationCredentialsArgs
- Key Pair Authentication Destination Snowflake Configuration Credentials Key Pair Authentication
- Configuration details for the Key Pair Authentication.
- Username And Password Destination Snowflake Configuration Credentials Username And Password
- Configuration details for the Username and Password Authentication.
- Key Pair Authentication Destination Snowflake Configuration Credentials Key Pair Authentication
- Configuration details for the Key Pair Authentication.
- Username And Password Destination Snowflake Configuration Credentials Username And Password
- Configuration details for the Username and Password Authentication.
- key Pair Authentication Destination Snowflake Configuration Credentials Key Pair Authentication
- Configuration details for the Key Pair Authentication.
- username And Password Destination Snowflake Configuration Credentials Username And Password
- Configuration details for the Username and Password Authentication.
- key Pair Authentication Destination Snowflake Configuration Credentials Key Pair Authentication
- Configuration details for the Key Pair Authentication.
- username And Password Destination Snowflake Configuration Credentials Username And Password
- Configuration details for the Username and Password Authentication.
- key_pair_authentication Destination Snowflake Configuration Credentials Key Pair Authentication
- Configuration details for the Key Pair Authentication.
- username_and_password Destination Snowflake Configuration Credentials Username And Password
- Configuration details for the Username and Password Authentication.
- key Pair Authentication Property Map
- Configuration details for the Key Pair Authentication.
- username And Password Property Map
- Configuration details for the Username and Password Authentication.
DestinationSnowflakeConfigurationCredentialsKeyPairAuthentication, DestinationSnowflakeConfigurationCredentialsKeyPairAuthenticationArgs
- Private Key string
- RSA Private key to use for Snowflake connection. See the docs for more information on how to obtain this key.
- Additional Properties string
- Parsed as JSON.
- Auth Type string
- Default: "Key Pair Authentication"; must be "Key Pair Authentication"
- Private Key Password string
- Passphrase for private key
- Private Key string
- RSA Private key to use for Snowflake connection. See the docs for more information on how to obtain this key.
- Additional Properties string
- Parsed as JSON.
- Auth Type string
- Default: "Key Pair Authentication"; must be "Key Pair Authentication"
- Private Key Password string
- Passphrase for private key
- private Key String
- RSA Private key to use for Snowflake connection. See the docs for more information on how to obtain this key.
- additional Properties String
- Parsed as JSON.
- auth Type String
- Default: "Key Pair Authentication"; must be "Key Pair Authentication"
- private Key Password String
- Passphrase for private key
- private Key string
- RSA Private key to use for Snowflake connection. See the docs for more information on how to obtain this key.
- additional Properties string
- Parsed as JSON.
- auth Type string
- Default: "Key Pair Authentication"; must be "Key Pair Authentication"
- private Key Password string
- Passphrase for private key
- private_key str
- RSA Private key to use for Snowflake connection. See the docs for more information on how to obtain this key.
- additional_properties str
- Parsed as JSON.
- auth_type str
- Default: "Key Pair Authentication"; must be "Key Pair Authentication"
- private_key_password str
- Passphrase for private key
- private Key String
- RSA Private key to use for Snowflake connection. See the docs for more information on how to obtain this key.
- additional Properties String
- Parsed as JSON.
- auth Type String
- Default: "Key Pair Authentication"; must be "Key Pair Authentication"
- private Key Password String
- Passphrase for private key
DestinationSnowflakeConfigurationCredentialsUsernameAndPassword, DestinationSnowflakeConfigurationCredentialsUsernameAndPasswordArgs
- Password string
- Enter the password associated with the username.
- Additional
Properties string - Parsed as JSON.
- Auth
Type string - Default: "Username and Password"; must be "Username and Password"
- Password string
- Enter the password associated with the username.
- Additional
Properties string - Parsed as JSON.
- Auth
Type string - Default: "Username and Password"; must be "Username and Password"
- password String
- Enter the password associated with the username.
- additional
Properties String - Parsed as JSON.
- auth
Type String - Default: "Username and Password"; must be "Username and Password"
- password string
- Enter the password associated with the username.
- additional
Properties string - Parsed as JSON.
- auth
Type string - Default: "Username and Password"; must be "Username and Password"
- password str
- Enter the password associated with the username.
- additional_
properties str - Parsed as JSON.
- auth_
type str - Default: "Username and Password"; must be "Username and Password"
- password String
- Enter the password associated with the username.
- additional
Properties String - Parsed as JSON.
- auth
Type String - Default: "Username and Password"; must be "Username and Password"
DestinationSnowflakeResourceAllocation, DestinationSnowflakeResourceAllocationArgs
- Default Destination Snowflake Resource Allocation Default
- optional resource requirements to run workers (blank for unbounded allocations)
- Job Specifics List<Destination Snowflake Resource Allocation Job Specific>
- Default Destination Snowflake Resource Allocation Default
- optional resource requirements to run workers (blank for unbounded allocations)
- Job Specifics []Destination Snowflake Resource Allocation Job Specific
- default_ Destination Snowflake Resource Allocation Default
- optional resource requirements to run workers (blank for unbounded allocations)
- job Specifics List<Destination Snowflake Resource Allocation Job Specific>
- default Destination Snowflake Resource Allocation Default
- optional resource requirements to run workers (blank for unbounded allocations)
- job Specifics Destination Snowflake Resource Allocation Job Specific[]
- default Destination Snowflake Resource Allocation Default
- optional resource requirements to run workers (blank for unbounded allocations)
- job_specifics Sequence[Destination Snowflake Resource Allocation Job Specific]
- default Property Map
- optional resource requirements to run workers (blank for unbounded allocations)
- job Specifics List<Property Map>
DestinationSnowflakeResourceAllocationDefault, DestinationSnowflakeResourceAllocationDefaultArgs
- Cpu Limit string
- Cpu Request string
- Ephemeral Storage Limit string
- Ephemeral Storage Request string
- Memory Limit string
- Memory Request string
- Cpu Limit string
- Cpu Request string
- Ephemeral Storage Limit string
- Ephemeral Storage Request string
- Memory Limit string
- Memory Request string
- cpu Limit String
- cpu Request String
- ephemeral Storage Limit String
- ephemeral Storage Request String
- memory Limit String
- memory Request String
- cpu Limit string
- cpu Request string
- ephemeral Storage Limit string
- ephemeral Storage Request string
- memory Limit string
- memory Request string
- cpu_limit str
- cpu_request str
- ephemeral_storage_limit str
- ephemeral_storage_request str
- memory_limit str
- memory_request str
- cpu Limit String
- cpu Request String
- ephemeral Storage Limit String
- ephemeral Storage Request String
- memory Limit String
- memory Request String
DestinationSnowflakeResourceAllocationJobSpecific, DestinationSnowflakeResourceAllocationJobSpecificArgs
- Job
Type string - enum that describes the different types of jobs that the platform runs.
- Resource
Requirements DestinationSnowflake Resource Allocation Job Specific Resource Requirements - optional resource requirements to run workers (blank for unbounded allocations)
- Job
Type string - enum that describes the different types of jobs that the platform runs.
- Resource
Requirements DestinationSnowflake Resource Allocation Job Specific Resource Requirements - optional resource requirements to run workers (blank for unbounded allocations)
- job
Type String - enum that describes the different types of jobs that the platform runs.
- resource
Requirements DestinationSnowflake Resource Allocation Job Specific Resource Requirements - optional resource requirements to run workers (blank for unbounded allocations)
- job
Type string - enum that describes the different types of jobs that the platform runs.
- resource
Requirements DestinationSnowflake Resource Allocation Job Specific Resource Requirements - optional resource requirements to run workers (blank for unbounded allocations)
- job_
type str - enum that describes the different types of jobs that the platform runs.
- resource_
requirements DestinationSnowflake Resource Allocation Job Specific Resource Requirements - optional resource requirements to run workers (blank for unbounded allocations)
- job
Type String - enum that describes the different types of jobs that the platform runs.
- resource
Requirements Property Map - optional resource requirements to run workers (blank for unbounded allocations)
DestinationSnowflakeResourceAllocationJobSpecificResourceRequirements, DestinationSnowflakeResourceAllocationJobSpecificResourceRequirementsArgs
- Cpu Limit string
- Cpu Request string
- Ephemeral Storage Limit string
- Ephemeral Storage Request string
- Memory Limit string
- Memory Request string
- Cpu Limit string
- Cpu Request string
- Ephemeral Storage Limit string
- Ephemeral Storage Request string
- Memory Limit string
- Memory Request string
- cpu Limit String
- cpu Request String
- ephemeral Storage Limit String
- ephemeral Storage Request String
- memory Limit String
- memory Request String
- cpu Limit string
- cpu Request string
- ephemeral Storage Limit string
- ephemeral Storage Request string
- memory Limit string
- memory Request string
- cpu_limit str
- cpu_request str
- ephemeral_storage_limit str
- ephemeral_storage_request str
- memory_limit str
- memory_request str
- cpu Limit String
- cpu Request String
- ephemeral Storage Limit String
- ephemeral Storage Request String
- memory Limit String
- memory Request String
Import
In Terraform v1.5.0 and later, the import block can be used with the id attribute, for example:
terraform
import {
to = airbyte_destination_snowflake.my_airbyte_destination_snowflake
id = "..."
}
The pulumi import command can be used, for example:
$ pulumi import airbyte:index/destinationSnowflake:DestinationSnowflake my_airbyte_destination_snowflake "..."
To learn more about importing existing cloud resources, see Importing resources.
Package Details
- Repository
- airbyte airbytehq/terraform-provider-airbyte
- License
- Notes
- This Pulumi package is based on the
airbyte Terraform Provider.
