DestinationBigquery Resource
Example Usage
TypeScript
import * as pulumi from "@pulumi/pulumi";
import * as airbyte from "@pulumi/airbyte";
const myDestinationBigquery = new airbyte.DestinationBigquery("my_destination_bigquery", {
configuration: {
additionalProperties: "{ \"see\": \"documentation\" }",
cdcDeletionMode: "Soft delete",
credentialsJson: "...my_credentials_json...",
datasetId: "...my_dataset_id...",
datasetLocation: "EU",
disableTypeDedupe: true,
loadingMethod: {
batchedStandardInserts: {
additionalProperties: "{ \"see\": \"documentation\" }",
method: "Standard",
},
},
projectId: "...my_project_id...",
rawDataDataset: "...my_raw_data_dataset...",
},
definitionId: "92c3eb2b-6d61-4610-adf2-eee065419ed9",
name: "...my_name...",
workspaceId: "acee73dd-54d3-476f-a8ea-d39d218f52cd",
});
Python
import pulumi
import pulumi_airbyte as airbyte
my_destination_bigquery = airbyte.DestinationBigquery("my_destination_bigquery",
configuration={
"additional_properties": "{ \"see\": \"documentation\" }",
"cdc_deletion_mode": "Soft delete",
"credentials_json": "...my_credentials_json...",
"dataset_id": "...my_dataset_id...",
"dataset_location": "EU",
"disable_type_dedupe": True,
"loading_method": {
"batched_standard_inserts": {
"additional_properties": "{ \"see\": \"documentation\" }",
"method": "Standard",
},
},
"project_id": "...my_project_id...",
"raw_data_dataset": "...my_raw_data_dataset...",
},
definition_id="92c3eb2b-6d61-4610-adf2-eee065419ed9",
name="...my_name...",
workspace_id="acee73dd-54d3-476f-a8ea-d39d218f52cd")
Go
package main
import (
"github.com/pulumi/pulumi-terraform-provider/sdks/go/airbyte/airbyte"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
_, err := airbyte.NewDestinationBigquery(ctx, "my_destination_bigquery", &airbyte.DestinationBigqueryArgs{
Configuration: &airbyte.DestinationBigqueryConfigurationArgs{
AdditionalProperties: pulumi.String("{ \"see\": \"documentation\" }"),
CdcDeletionMode: pulumi.String("Soft delete"),
CredentialsJson: pulumi.String("...my_credentials_json..."),
DatasetId: pulumi.String("...my_dataset_id..."),
DatasetLocation: pulumi.String("EU"),
DisableTypeDedupe: pulumi.Bool(true),
LoadingMethod: &airbyte.DestinationBigqueryConfigurationLoadingMethodArgs{
BatchedStandardInserts: &airbyte.DestinationBigqueryConfigurationLoadingMethodBatchedStandardInsertsArgs{
AdditionalProperties: pulumi.String("{ \"see\": \"documentation\" }"),
Method: pulumi.String("Standard"),
},
},
ProjectId: pulumi.String("...my_project_id..."),
RawDataDataset: pulumi.String("...my_raw_data_dataset..."),
},
DefinitionId: pulumi.String("92c3eb2b-6d61-4610-adf2-eee065419ed9"),
Name: pulumi.String("...my_name..."),
WorkspaceId: pulumi.String("acee73dd-54d3-476f-a8ea-d39d218f52cd"),
})
if err != nil {
return err
}
return nil
})
}
C#
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Airbyte = Pulumi.Airbyte;
return await Deployment.RunAsync(() =>
{
var myDestinationBigquery = new Airbyte.DestinationBigquery("my_destination_bigquery", new()
{
Configuration = new Airbyte.Inputs.DestinationBigqueryConfigurationArgs
{
AdditionalProperties = "{ \"see\": \"documentation\" }",
CdcDeletionMode = "Soft delete",
CredentialsJson = "...my_credentials_json...",
DatasetId = "...my_dataset_id...",
DatasetLocation = "EU",
DisableTypeDedupe = true,
LoadingMethod = new Airbyte.Inputs.DestinationBigqueryConfigurationLoadingMethodArgs
{
BatchedStandardInserts = new Airbyte.Inputs.DestinationBigqueryConfigurationLoadingMethodBatchedStandardInsertsArgs
{
AdditionalProperties = "{ \"see\": \"documentation\" }",
Method = "Standard",
},
},
ProjectId = "...my_project_id...",
RawDataDataset = "...my_raw_data_dataset...",
},
DefinitionId = "92c3eb2b-6d61-4610-adf2-eee065419ed9",
Name = "...my_name...",
WorkspaceId = "acee73dd-54d3-476f-a8ea-d39d218f52cd",
});
});
Java
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.airbyte.DestinationBigquery;
import com.pulumi.airbyte.DestinationBigqueryArgs;
import com.pulumi.airbyte.inputs.DestinationBigqueryConfigurationArgs;
import com.pulumi.airbyte.inputs.DestinationBigqueryConfigurationLoadingMethodArgs;
import com.pulumi.airbyte.inputs.DestinationBigqueryConfigurationLoadingMethodBatchedStandardInsertsArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var myDestinationBigquery = new DestinationBigquery("myDestinationBigquery", DestinationBigqueryArgs.builder()
.configuration(DestinationBigqueryConfigurationArgs.builder()
.additionalProperties("{ \"see\": \"documentation\" }")
.cdcDeletionMode("Soft delete")
.credentialsJson("...my_credentials_json...")
.datasetId("...my_dataset_id...")
.datasetLocation("EU")
.disableTypeDedupe(true)
.loadingMethod(DestinationBigqueryConfigurationLoadingMethodArgs.builder()
.batchedStandardInserts(DestinationBigqueryConfigurationLoadingMethodBatchedStandardInsertsArgs.builder()
.additionalProperties("{ \"see\": \"documentation\" }")
.method("Standard")
.build())
.build())
.projectId("...my_project_id...")
.rawDataDataset("...my_raw_data_dataset...")
.build())
.definitionId("92c3eb2b-6d61-4610-adf2-eee065419ed9")
.name("...my_name...")
.workspaceId("acee73dd-54d3-476f-a8ea-d39d218f52cd")
.build());
}
}
YAML
resources:
myDestinationBigquery:
type: airbyte:DestinationBigquery
name: my_destination_bigquery
properties:
configuration:
additionalProperties: '{ "see": "documentation" }'
cdcDeletionMode: Soft delete
credentialsJson: '...my_credentials_json...'
datasetId: '...my_dataset_id...'
datasetLocation: EU
disableTypeDedupe: true
loadingMethod:
batchedStandardInserts:
additionalProperties: '{ "see": "documentation" }'
method: Standard
projectId: '...my_project_id...'
rawDataDataset: '...my_raw_data_dataset...'
definitionId: 92c3eb2b-6d61-4610-adf2-eee065419ed9
name: '...my_name...'
workspaceId: acee73dd-54d3-476f-a8ea-d39d218f52cd
Create DestinationBigquery Resource
Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.
Constructor syntax
TypeScript
new DestinationBigquery(name: string, args: DestinationBigqueryArgs, opts?: CustomResourceOptions);
Python
@overload
def DestinationBigquery(resource_name: str,
                        args: DestinationBigqueryArgs,
                        opts: Optional[ResourceOptions] = None)
@overload
def DestinationBigquery(resource_name: str,
                        opts: Optional[ResourceOptions] = None,
                        configuration: Optional[DestinationBigqueryConfigurationArgs] = None,
                        workspace_id: Optional[str] = None,
                        definition_id: Optional[str] = None,
                        name: Optional[str] = None)
Go
func NewDestinationBigquery(ctx *Context, name string, args DestinationBigqueryArgs, opts ...ResourceOption) (*DestinationBigquery, error)
C#
public DestinationBigquery(string name, DestinationBigqueryArgs args, CustomResourceOptions? opts = null)
Java
public DestinationBigquery(String name, DestinationBigqueryArgs args)
public DestinationBigquery(String name, DestinationBigqueryArgs args, CustomResourceOptions options)
YAML
type: airbyte:DestinationBigquery
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
Parameters
- name string
- The unique name of the resource.
- args DestinationBigqueryArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- resource_name str
- The unique name of the resource.
- args DestinationBigqueryArgs
- The arguments to resource properties.
- opts ResourceOptions
- Bag of options to control resource's behavior.
- ctx Context
- Context object for the current deployment.
- name string
- The unique name of the resource.
- args DestinationBigqueryArgs
- The arguments to resource properties.
- opts ResourceOption
- Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args DestinationBigqueryArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- name String
- The unique name of the resource.
- args DestinationBigqueryArgs
- The arguments to resource properties.
- options CustomResourceOptions
- Bag of options to control resource's behavior.
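The opts bag accepts standard Pulumi resource options such as protect, dependsOn, and aliases. A brief TypeScript sketch using placeholder configuration values (the protect option is a generic Pulumi option, not specific to this provider):

import * as airbyte from "@pulumi/airbyte";

const dest = new airbyte.DestinationBigquery("dest", {
    configuration: {
        projectId: "my-gcp-project", // placeholder values throughout
        datasetId: "analytics",
        datasetLocation: "US",
    },
    workspaceId: "00000000-0000-0000-0000-000000000000", // placeholder workspace ID
}, {
    protect: true, // guard the destination against accidental deletion
});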
Constructor example
The following reference example uses placeholder values for all input properties.
C#
var destinationBigqueryResource = new Airbyte.DestinationBigquery("destinationBigqueryResource", new()
{
Configuration = new Airbyte.Inputs.DestinationBigqueryConfigurationArgs
{
DatasetId = "string",
DatasetLocation = "string",
ProjectId = "string",
AdditionalProperties = "string",
CdcDeletionMode = "string",
CredentialsJson = "string",
DisableTypeDedupe = false,
LoadingMethod = new Airbyte.Inputs.DestinationBigqueryConfigurationLoadingMethodArgs
{
BatchedStandardInserts = new Airbyte.Inputs.DestinationBigqueryConfigurationLoadingMethodBatchedStandardInsertsArgs
{
AdditionalProperties = "string",
Method = "string",
},
GcsStaging = new Airbyte.Inputs.DestinationBigqueryConfigurationLoadingMethodGcsStagingArgs
{
Credential = new Airbyte.Inputs.DestinationBigqueryConfigurationLoadingMethodGcsStagingCredentialArgs
{
HmacKey = new Airbyte.Inputs.DestinationBigqueryConfigurationLoadingMethodGcsStagingCredentialHmacKeyArgs
{
HmacKeyAccessId = "string",
HmacKeySecret = "string",
AdditionalProperties = "string",
CredentialType = "string",
},
},
GcsBucketName = "string",
GcsBucketPath = "string",
AdditionalProperties = "string",
KeepFilesInGcsBucket = "string",
Method = "string",
},
},
RawDataDataset = "string",
},
WorkspaceId = "string",
DefinitionId = "string",
Name = "string",
});
Go
example, err := airbyte.NewDestinationBigquery(ctx, "destinationBigqueryResource", &airbyte.DestinationBigqueryArgs{
Configuration: &airbyte.DestinationBigqueryConfigurationArgs{
DatasetId: pulumi.String("string"),
DatasetLocation: pulumi.String("string"),
ProjectId: pulumi.String("string"),
AdditionalProperties: pulumi.String("string"),
CdcDeletionMode: pulumi.String("string"),
CredentialsJson: pulumi.String("string"),
DisableTypeDedupe: pulumi.Bool(false),
LoadingMethod: &airbyte.DestinationBigqueryConfigurationLoadingMethodArgs{
BatchedStandardInserts: &airbyte.DestinationBigqueryConfigurationLoadingMethodBatchedStandardInsertsArgs{
AdditionalProperties: pulumi.String("string"),
Method: pulumi.String("string"),
},
GcsStaging: &airbyte.DestinationBigqueryConfigurationLoadingMethodGcsStagingArgs{
Credential: &airbyte.DestinationBigqueryConfigurationLoadingMethodGcsStagingCredentialArgs{
HmacKey: &airbyte.DestinationBigqueryConfigurationLoadingMethodGcsStagingCredentialHmacKeyArgs{
HmacKeyAccessId: pulumi.String("string"),
HmacKeySecret: pulumi.String("string"),
AdditionalProperties: pulumi.String("string"),
CredentialType: pulumi.String("string"),
},
},
GcsBucketName: pulumi.String("string"),
GcsBucketPath: pulumi.String("string"),
AdditionalProperties: pulumi.String("string"),
KeepFilesInGcsBucket: pulumi.String("string"),
Method: pulumi.String("string"),
},
},
RawDataDataset: pulumi.String("string"),
},
WorkspaceId: pulumi.String("string"),
DefinitionId: pulumi.String("string"),
Name: pulumi.String("string"),
})
Java
var destinationBigqueryResource = new DestinationBigquery("destinationBigqueryResource", DestinationBigqueryArgs.builder()
.configuration(DestinationBigqueryConfigurationArgs.builder()
.datasetId("string")
.datasetLocation("string")
.projectId("string")
.additionalProperties("string")
.cdcDeletionMode("string")
.credentialsJson("string")
.disableTypeDedupe(false)
.loadingMethod(DestinationBigqueryConfigurationLoadingMethodArgs.builder()
.batchedStandardInserts(DestinationBigqueryConfigurationLoadingMethodBatchedStandardInsertsArgs.builder()
.additionalProperties("string")
.method("string")
.build())
.gcsStaging(DestinationBigqueryConfigurationLoadingMethodGcsStagingArgs.builder()
.credential(DestinationBigqueryConfigurationLoadingMethodGcsStagingCredentialArgs.builder()
.hmacKey(DestinationBigqueryConfigurationLoadingMethodGcsStagingCredentialHmacKeyArgs.builder()
.hmacKeyAccessId("string")
.hmacKeySecret("string")
.additionalProperties("string")
.credentialType("string")
.build())
.build())
.gcsBucketName("string")
.gcsBucketPath("string")
.additionalProperties("string")
.keepFilesInGcsBucket("string")
.method("string")
.build())
.build())
.rawDataDataset("string")
.build())
.workspaceId("string")
.definitionId("string")
.name("string")
.build());
Python
destination_bigquery_resource = airbyte.DestinationBigquery("destinationBigqueryResource",
configuration={
"dataset_id": "string",
"dataset_location": "string",
"project_id": "string",
"additional_properties": "string",
"cdc_deletion_mode": "string",
"credentials_json": "string",
"disable_type_dedupe": False,
"loading_method": {
"batched_standard_inserts": {
"additional_properties": "string",
"method": "string",
},
"gcs_staging": {
"credential": {
"hmac_key": {
"hmac_key_access_id": "string",
"hmac_key_secret": "string",
"additional_properties": "string",
"credential_type": "string",
},
},
"gcs_bucket_name": "string",
"gcs_bucket_path": "string",
"additional_properties": "string",
"keep_files_in_gcs_bucket": "string",
"method": "string",
},
},
"raw_data_dataset": "string",
},
workspace_id="string",
definition_id="string",
name="string")
TypeScript
const destinationBigqueryResource = new airbyte.DestinationBigquery("destinationBigqueryResource", {
configuration: {
datasetId: "string",
datasetLocation: "string",
projectId: "string",
additionalProperties: "string",
cdcDeletionMode: "string",
credentialsJson: "string",
disableTypeDedupe: false,
loadingMethod: {
batchedStandardInserts: {
additionalProperties: "string",
method: "string",
},
gcsStaging: {
credential: {
hmacKey: {
hmacKeyAccessId: "string",
hmacKeySecret: "string",
additionalProperties: "string",
credentialType: "string",
},
},
gcsBucketName: "string",
gcsBucketPath: "string",
additionalProperties: "string",
keepFilesInGcsBucket: "string",
method: "string",
},
},
rawDataDataset: "string",
},
workspaceId: "string",
definitionId: "string",
name: "string",
});
YAML
type: airbyte:DestinationBigquery
properties:
configuration:
additionalProperties: string
cdcDeletionMode: string
credentialsJson: string
datasetId: string
datasetLocation: string
disableTypeDedupe: false
loadingMethod:
batchedStandardInserts:
additionalProperties: string
method: string
gcsStaging:
additionalProperties: string
credential:
hmacKey:
additionalProperties: string
credentialType: string
hmacKeyAccessId: string
hmacKeySecret: string
gcsBucketName: string
gcsBucketPath: string
keepFilesInGcsBucket: string
method: string
projectId: string
rawDataDataset: string
definitionId: string
name: string
workspaceId: string
DestinationBigquery Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
The DestinationBigquery resource accepts the following input properties:
(Property names and primitive types follow each language's conventions, e.g. workspaceId string in TypeScript, workspace_id str in Python, and WorkspaceId string in C# and Go; each list below is shown once in canonical form.)
- Configuration DestinationBigqueryConfiguration
- The values required to configure the destination. The schema for this must match the schema returned by destination_definition_specifications/get for the destinationDefinition.
- WorkspaceId string
- DefinitionId string
- The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Default: "22f6c74f-5699-40ff-833c-4a879ea40133". Requires replacement if changed.
- Name string
- Name of the destination, e.g. dev-mysql-instance.
Outputs
All input properties are implicitly available as output properties. Additionally, the DestinationBigquery resource produces the following output properties:
- CreatedAt double
- DestinationId string
- DestinationType string
- Id string
- The provider-assigned unique ID for this managed resource.
- ResourceAllocation DestinationBigqueryResourceAllocation
- Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition. They are overridden by the job-type-specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
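All of these can be read off the resource like any other Pulumi outputs. For example, continuing the TypeScript usage example from above:

export const destinationId = myDestinationBigquery.destinationId;
export const destinationType = myDestinationBigquery.destinationType;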
Look up Existing DestinationBigquery Resource
Get an existing DestinationBigquery resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
TypeScript
public static get(name: string, id: Input<ID>, state?: DestinationBigqueryState, opts?: CustomResourceOptions): DestinationBigquery
Python
@staticmethod
def get(resource_name: str,
        id: str,
        opts: Optional[ResourceOptions] = None,
        configuration: Optional[DestinationBigqueryConfigurationArgs] = None,
        created_at: Optional[float] = None,
        definition_id: Optional[str] = None,
        destination_id: Optional[str] = None,
        destination_type: Optional[str] = None,
        name: Optional[str] = None,
        resource_allocation: Optional[DestinationBigqueryResourceAllocationArgs] = None,
        workspace_id: Optional[str] = None) -> DestinationBigquery
Go
func GetDestinationBigquery(ctx *Context, name string, id IDInput, state *DestinationBigqueryState, opts ...ResourceOption) (*DestinationBigquery, error)
C#
public static DestinationBigquery Get(string name, Input<string> id, DestinationBigqueryState? state, CustomResourceOptions? opts = null)
Java
public static DestinationBigquery get(String name, Output<String> id, DestinationBigqueryState state, CustomResourceOptions options)
YAML
resources:
  _:
    type: airbyte:DestinationBigquery
    get:
      id: ${id}
Parameters
- name (resource_name in Python)
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
The following state properties are supported:
- Configuration DestinationBigqueryConfiguration
- The values required to configure the destination. The schema for this must match the schema returned by destination_definition_specifications/get for the destinationDefinition.
- CreatedAt double
- DefinitionId string
- The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Default: "22f6c74f-5699-40ff-833c-4a879ea40133". Requires replacement if changed.
- DestinationId string
- DestinationType string
- Name string
- Name of the destination, e.g. dev-mysql-instance.
- ResourceAllocation DestinationBigqueryResourceAllocation
- Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition. They are overridden by the job-type-specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
- WorkspaceId string
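A minimal TypeScript sketch of the lookup (the ID is a placeholder; a real destination UUID would be supplied):

import * as airbyte from "@pulumi/airbyte";

const existing = airbyte.DestinationBigquery.get(
    "existing-bigquery-destination",
    "00000000-0000-0000-0000-000000000000", // placeholder destination ID
);
export const existingName = existing.name;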
Supporting Types
DestinationBigqueryConfiguration, DestinationBigqueryConfigurationArgs
- DatasetId string
- The default BigQuery Dataset ID that tables are replicated to if the source does not specify a namespace. Read more here.
- DatasetLocation string
- The location of the dataset. Warning: Changes made after creation will not be applied. Read more here. Must be one of ["EU", "US", "africa-south1", "asia-east1", "asia-east2", "asia-northeast1", "asia-northeast2", "asia-northeast3", "asia-south1", "asia-south2", "asia-southeast1", "asia-southeast2", "australia-southeast1", "australia-southeast2", "europe-central2", "europe-north1", "europe-north2", "europe-southwest1", "europe-west1", "europe-west2", "europe-west3", "europe-west4", "europe-west6", "europe-west8", "europe-west9", "europe-west10", "europe-west12", "me-central1", "me-central2", "me-west1", "northamerica-northeast1", "northamerica-northeast2", "northamerica-south1", "southamerica-east1", "southamerica-west1", "us-central1", "us-east1", "us-east4", "us-east5", "us-south1", "us-west1", "us-west2", "us-west3", "us-west4"]
- ProjectId string
- The GCP project ID for the project containing the target BigQuery dataset. Read more here.
- AdditionalProperties string
- Parsed as JSON.
- CdcDeletionMode string
- Whether to execute CDC deletions as hard deletes (i.e. propagate source deletions to the destination) or soft deletes (i.e. leave a tombstone record in the destination). Defaults to hard deletes. Default: "Hard delete"; must be one of ["Hard delete", "Soft delete"]
- CredentialsJson string
- The contents of the JSON service account key. Check out the docs if you need help generating this key. Default credentials will be used if this field is left empty.
- DisableTypeDedupe bool
- Write the legacy "raw tables" format, to enable backwards compatibility with older versions of this connector. Default: false
- LoadingMethod DestinationBigqueryConfigurationLoadingMethod
- The way data will be uploaded to BigQuery.
- RawDataDataset string
- Airbyte will use this dataset for various internal tables. In legacy raw tables mode, the raw tables will be stored in this dataset. Defaults to "airbyte_internal".
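Because credentialsJson carries a service account key, it is usually pulled from Pulumi config as a secret rather than written inline. A sketch in TypeScript (the config key gcpServiceAccountKey is an arbitrary name chosen for this example):

import * as pulumi from "@pulumi/pulumi";
import * as airbyte from "@pulumi/airbyte";

const cfg = new pulumi.Config();
const serviceAccountKey = cfg.requireSecret("gcpServiceAccountKey");

const dest = new airbyte.DestinationBigquery("dest", {
    configuration: {
        projectId: "my-gcp-project",        // placeholder values
        datasetId: "analytics",
        datasetLocation: "US",
        credentialsJson: serviceAccountKey, // stored encrypted in state as a secret
    },
    workspaceId: "00000000-0000-0000-0000-000000000000", // placeholder workspace ID
});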
DestinationBigqueryConfigurationLoadingMethod, DestinationBigqueryConfigurationLoadingMethodArgs
- BatchedStandardInserts DestinationBigqueryConfigurationLoadingMethodBatchedStandardInserts
- Direct loading using batched SQL INSERT statements. This method uses the BigQuery driver to convert large INSERT statements into file uploads automatically.
- GcsStaging DestinationBigqueryConfigurationLoadingMethodGcsStaging
- Writes large batches of records to a file, uploads the file to GCS, then uses COPY INTO to load your data into BigQuery.
DestinationBigqueryConfigurationLoadingMethodBatchedStandardInserts, DestinationBigqueryConfigurationLoadingMethodBatchedStandardInsertsArgs
- AdditionalProperties string
- Parsed as JSON.
- Method string
- Default: "Standard"; must be "Standard"
DestinationBigqueryConfigurationLoadingMethodGcsStaging, DestinationBigqueryConfigurationLoadingMethodGcsStagingArgs
- Credential DestinationBigqueryConfigurationLoadingMethodGcsStagingCredential
- An HMAC key is a type of credential and can be associated with a service account or a user account in Cloud Storage. Read more here.
- GcsBucketName string
- The name of the GCS bucket. Read more here.
- GcsBucketPath string
- Directory under the GCS bucket where data will be written.
- AdditionalProperties string
- Parsed as JSON.
- KeepFilesInGcsBucket string
- This upload method temporarily stores records in a GCS bucket; this setting controls whether those records are removed from GCS once the migration has finished. Default: "Delete all tmp files from GCS"; must be one of ["Delete all tmp files from GCS", "Keep all tmp files in GCS"]
- Method string
- Default: "GCS Staging"; must be "GCS Staging"
DestinationBigqueryConfigurationLoadingMethodGcsStagingCredential, DestinationBigqueryConfigurationLoadingMethodGcsStagingCredentialArgs
- HmacKey DestinationBigqueryConfigurationLoadingMethodGcsStagingCredentialHmacKey
- An HMAC key credential, as described below.
DestinationBigqueryConfigurationLoadingMethodGcsStagingCredentialHmacKey, DestinationBigqueryConfigurationLoadingMethodGcsStagingCredentialHmacKeyArgs
- HmacKeyAccessId string
- HMAC key access ID. When linked to a service account, this ID is 61 characters long; when linked to a user account, it is 24 characters long.
- HmacKeySecret string
- The corresponding secret for the access ID. It is a 40-character base-64 encoded string.
- AdditionalProperties string
- Parsed as JSON.
- CredentialType string
- Default: "HMAC_KEY"; must be "HMAC_KEY"
DestinationBigqueryResourceAllocation, DestinationBigqueryResourceAllocationArgs
- Default DestinationBigqueryResourceAllocationDefault
- Optional resource requirements to run workers (blank for unbounded allocations).
- JobSpecifics List<DestinationBigqueryResourceAllocationJobSpecific>
DestinationBigqueryResourceAllocationDefault, DestinationBigqueryResourceAllocationDefaultArgs
- CpuLimit string
- CpuRequest string
- EphemeralStorageLimit string
- EphemeralStorageRequest string
- MemoryLimit string
- MemoryRequest string
DestinationBigqueryResourceAllocationJobSpecific, DestinationBigqueryResourceAllocationJobSpecificArgs
- JobType string
- Enum that describes the different types of jobs that the platform runs.
- ResourceRequirements DestinationBigqueryResourceAllocationJobSpecificResourceRequirements
- Optional resource requirements to run workers (blank for unbounded allocations).
DestinationBigqueryResourceAllocationJobSpecificResourceRequirements, DestinationBigqueryResourceAllocationJobSpecificResourceRequirementsArgs
- CpuLimit string
- CpuRequest string
- EphemeralStorageLimit string
- EphemeralStorageRequest string
- MemoryLimit string
- MemoryRequest string
Import
In Terraform v1.5.0 and later, the import block can be used with the id attribute, for example:
terraform
import {
to = airbyte_destination_bigquery.my_airbyte_destination_bigquery
id = "..."
}
The pulumi import command can be used, for example:
$ pulumi import airbyte:index/destinationBigquery:DestinationBigquery my_airbyte_destination_bigquery "..."
To learn more about importing existing cloud resources, see Importing resources.
Package Details
- Repository
- airbyte airbytehq/terraform-provider-airbyte
- License
- Notes
- This Pulumi package is based on the airbyte Terraform provider.
