airbyte 1.0.0-rc6 published on Monday, Feb 16, 2026 by airbytehq

    DestinationBigquery Resource

    Example Usage

    import * as pulumi from "@pulumi/pulumi";
    import * as airbyte from "@pulumi/airbyte";
    
    const myDestinationBigquery = new airbyte.DestinationBigquery("my_destination_bigquery", {
        configuration: {
            additionalProperties: "{ \"see\": \"documentation\" }",
            cdcDeletionMode: "Soft delete",
            credentialsJson: "...my_credentials_json...",
            datasetId: "...my_dataset_id...",
            datasetLocation: "EU",
            disableTypeDedupe: true,
            loadingMethod: {
                batchedStandardInserts: {
                    additionalProperties: "{ \"see\": \"documentation\" }",
                    method: "Standard",
                },
            },
            projectId: "...my_project_id...",
            rawDataDataset: "...my_raw_data_dataset...",
        },
        definitionId: "92c3eb2b-6d61-4610-adf2-eee065419ed9",
        name: "...my_name...",
        workspaceId: "acee73dd-54d3-476f-a8ea-d39d218f52cd",
    });
    
    import pulumi
    import pulumi_airbyte as airbyte
    
    my_destination_bigquery = airbyte.DestinationBigquery("my_destination_bigquery",
        configuration={
            "additional_properties": "{ \"see\": \"documentation\" }",
            "cdc_deletion_mode": "Soft delete",
            "credentials_json": "...my_credentials_json...",
            "dataset_id": "...my_dataset_id...",
            "dataset_location": "EU",
            "disable_type_dedupe": True,
            "loading_method": {
                "batched_standard_inserts": {
                    "additional_properties": "{ \"see\": \"documentation\" }",
                    "method": "Standard",
                },
            },
            "project_id": "...my_project_id...",
            "raw_data_dataset": "...my_raw_data_dataset...",
        },
        definition_id="92c3eb2b-6d61-4610-adf2-eee065419ed9",
        name="...my_name...",
        workspace_id="acee73dd-54d3-476f-a8ea-d39d218f52cd")
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-terraform-provider/sdks/go/airbyte/airbyte"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		_, err := airbyte.NewDestinationBigquery(ctx, "my_destination_bigquery", &airbyte.DestinationBigqueryArgs{
    			Configuration: &airbyte.DestinationBigqueryConfigurationArgs{
    				AdditionalProperties: pulumi.String("{ \"see\": \"documentation\" }"),
    				CdcDeletionMode:      pulumi.String("Soft delete"),
    				CredentialsJson:      pulumi.String("...my_credentials_json..."),
    				DatasetId:            pulumi.String("...my_dataset_id..."),
    				DatasetLocation:      pulumi.String("EU"),
    				DisableTypeDedupe:    pulumi.Bool(true),
    				LoadingMethod: &airbyte.DestinationBigqueryConfigurationLoadingMethodArgs{
    					BatchedStandardInserts: &airbyte.DestinationBigqueryConfigurationLoadingMethodBatchedStandardInsertsArgs{
    						AdditionalProperties: pulumi.String("{ \"see\": \"documentation\" }"),
    						Method:               pulumi.String("Standard"),
    					},
    				},
    				ProjectId:      pulumi.String("...my_project_id..."),
    				RawDataDataset: pulumi.String("...my_raw_data_dataset..."),
    			},
    			DefinitionId: pulumi.String("92c3eb2b-6d61-4610-adf2-eee065419ed9"),
    			Name:         pulumi.String("...my_name..."),
    			WorkspaceId:  pulumi.String("acee73dd-54d3-476f-a8ea-d39d218f52cd"),
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Airbyte = Pulumi.Airbyte;
    
    return await Deployment.RunAsync(() => 
    {
        var myDestinationBigquery = new Airbyte.DestinationBigquery("my_destination_bigquery", new()
        {
            Configuration = new Airbyte.Inputs.DestinationBigqueryConfigurationArgs
            {
                AdditionalProperties = "{ \"see\": \"documentation\" }",
                CdcDeletionMode = "Soft delete",
                CredentialsJson = "...my_credentials_json...",
                DatasetId = "...my_dataset_id...",
                DatasetLocation = "EU",
                DisableTypeDedupe = true,
                LoadingMethod = new Airbyte.Inputs.DestinationBigqueryConfigurationLoadingMethodArgs
                {
                    BatchedStandardInserts = new Airbyte.Inputs.DestinationBigqueryConfigurationLoadingMethodBatchedStandardInsertsArgs
                    {
                        AdditionalProperties = "{ \"see\": \"documentation\" }",
                        Method = "Standard",
                    },
                },
                ProjectId = "...my_project_id...",
                RawDataDataset = "...my_raw_data_dataset...",
            },
            DefinitionId = "92c3eb2b-6d61-4610-adf2-eee065419ed9",
            Name = "...my_name...",
            WorkspaceId = "acee73dd-54d3-476f-a8ea-d39d218f52cd",
        });
    
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.airbyte.DestinationBigquery;
    import com.pulumi.airbyte.DestinationBigqueryArgs;
    import com.pulumi.airbyte.inputs.DestinationBigqueryConfigurationArgs;
    import com.pulumi.airbyte.inputs.DestinationBigqueryConfigurationLoadingMethodArgs;
    import com.pulumi.airbyte.inputs.DestinationBigqueryConfigurationLoadingMethodBatchedStandardInsertsArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var myDestinationBigquery = new DestinationBigquery("myDestinationBigquery", DestinationBigqueryArgs.builder()
                .configuration(DestinationBigqueryConfigurationArgs.builder()
                    .additionalProperties("{ \"see\": \"documentation\" }")
                    .cdcDeletionMode("Soft delete")
                    .credentialsJson("...my_credentials_json...")
                    .datasetId("...my_dataset_id...")
                    .datasetLocation("EU")
                    .disableTypeDedupe(true)
                    .loadingMethod(DestinationBigqueryConfigurationLoadingMethodArgs.builder()
                        .batchedStandardInserts(DestinationBigqueryConfigurationLoadingMethodBatchedStandardInsertsArgs.builder()
                            .additionalProperties("{ \"see\": \"documentation\" }")
                            .method("Standard")
                            .build())
                        .build())
                    .projectId("...my_project_id...")
                    .rawDataDataset("...my_raw_data_dataset...")
                    .build())
                .definitionId("92c3eb2b-6d61-4610-adf2-eee065419ed9")
                .name("...my_name...")
                .workspaceId("acee73dd-54d3-476f-a8ea-d39d218f52cd")
                .build());
    
        }
    }
    
    resources:
      myDestinationBigquery:
        type: airbyte:DestinationBigquery
        name: my_destination_bigquery
        properties:
          configuration:
            additionalProperties: '{ "see": "documentation" }'
            cdcDeletionMode: Soft delete
            credentialsJson: '...my_credentials_json...'
            datasetId: '...my_dataset_id...'
            datasetLocation: EU
            disableTypeDedupe: true
            loadingMethod:
              batchedStandardInserts:
                additionalProperties: '{ "see": "documentation" }'
                method: Standard
            projectId: '...my_project_id...'
            rawDataDataset: '...my_raw_data_dataset...'
          definitionId: 92c3eb2b-6d61-4610-adf2-eee065419ed9
          name: '...my_name...'
          workspaceId: acee73dd-54d3-476f-a8ea-d39d218f52cd
    

    Create DestinationBigquery Resource

    Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.

    Constructor syntax

    new DestinationBigquery(name: string, args: DestinationBigqueryArgs, opts?: CustomResourceOptions);
    @overload
    def DestinationBigquery(resource_name: str,
                            args: DestinationBigqueryArgs,
                            opts: Optional[ResourceOptions] = None)
    
    @overload
    def DestinationBigquery(resource_name: str,
                            opts: Optional[ResourceOptions] = None,
                            configuration: Optional[DestinationBigqueryConfigurationArgs] = None,
                            workspace_id: Optional[str] = None,
                            definition_id: Optional[str] = None,
                            name: Optional[str] = None)
    func NewDestinationBigquery(ctx *Context, name string, args DestinationBigqueryArgs, opts ...ResourceOption) (*DestinationBigquery, error)
    public DestinationBigquery(string name, DestinationBigqueryArgs args, CustomResourceOptions? opts = null)
    public DestinationBigquery(String name, DestinationBigqueryArgs args)
    public DestinationBigquery(String name, DestinationBigqueryArgs args, CustomResourceOptions options)
    
    type: airbyte:DestinationBigquery
    properties: # The arguments to resource properties.
    options: # Bag of options to control resource's behavior.
    
    

    Parameters

    name string
    The unique name of the resource.
    args DestinationBigqueryArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    resource_name str
    The unique name of the resource.
    args DestinationBigqueryArgs
    The arguments to resource properties.
    opts ResourceOptions
    Bag of options to control resource's behavior.
    ctx Context
    Context object for the current deployment.
    name string
    The unique name of the resource.
    args DestinationBigqueryArgs
    The arguments to resource properties.
    opts ResourceOption
    Bag of options to control resource's behavior.
    name string
    The unique name of the resource.
    args DestinationBigqueryArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    name String
    The unique name of the resource.
    args DestinationBigqueryArgs
    The arguments to resource properties.
    options CustomResourceOptions
    Bag of options to control resource's behavior.

    Constructor example

    The following reference example uses placeholder values for all input properties.

    var destinationBigqueryResource = new Airbyte.DestinationBigquery("destinationBigqueryResource", new()
    {
        Configuration = new Airbyte.Inputs.DestinationBigqueryConfigurationArgs
        {
            DatasetId = "string",
            DatasetLocation = "string",
            ProjectId = "string",
            AdditionalProperties = "string",
            CdcDeletionMode = "string",
            CredentialsJson = "string",
            DisableTypeDedupe = false,
            LoadingMethod = new Airbyte.Inputs.DestinationBigqueryConfigurationLoadingMethodArgs
            {
                BatchedStandardInserts = new Airbyte.Inputs.DestinationBigqueryConfigurationLoadingMethodBatchedStandardInsertsArgs
                {
                    AdditionalProperties = "string",
                    Method = "string",
                },
                GcsStaging = new Airbyte.Inputs.DestinationBigqueryConfigurationLoadingMethodGcsStagingArgs
                {
                    Credential = new Airbyte.Inputs.DestinationBigqueryConfigurationLoadingMethodGcsStagingCredentialArgs
                    {
                        HmacKey = new Airbyte.Inputs.DestinationBigqueryConfigurationLoadingMethodGcsStagingCredentialHmacKeyArgs
                        {
                            HmacKeyAccessId = "string",
                            HmacKeySecret = "string",
                            AdditionalProperties = "string",
                            CredentialType = "string",
                        },
                    },
                    GcsBucketName = "string",
                    GcsBucketPath = "string",
                    AdditionalProperties = "string",
                    KeepFilesInGcsBucket = "string",
                    Method = "string",
                },
            },
            RawDataDataset = "string",
        },
        WorkspaceId = "string",
        DefinitionId = "string",
        Name = "string",
    });
    
    example, err := airbyte.NewDestinationBigquery(ctx, "destinationBigqueryResource", &airbyte.DestinationBigqueryArgs{
    	Configuration: &airbyte.DestinationBigqueryConfigurationArgs{
    		DatasetId:            pulumi.String("string"),
    		DatasetLocation:      pulumi.String("string"),
    		ProjectId:            pulumi.String("string"),
    		AdditionalProperties: pulumi.String("string"),
    		CdcDeletionMode:      pulumi.String("string"),
    		CredentialsJson:      pulumi.String("string"),
    		DisableTypeDedupe:    pulumi.Bool(false),
    		LoadingMethod: &airbyte.DestinationBigqueryConfigurationLoadingMethodArgs{
    			BatchedStandardInserts: &airbyte.DestinationBigqueryConfigurationLoadingMethodBatchedStandardInsertsArgs{
    				AdditionalProperties: pulumi.String("string"),
    				Method:               pulumi.String("string"),
    			},
    			GcsStaging: &airbyte.DestinationBigqueryConfigurationLoadingMethodGcsStagingArgs{
    				Credential: &airbyte.DestinationBigqueryConfigurationLoadingMethodGcsStagingCredentialArgs{
    					HmacKey: &airbyte.DestinationBigqueryConfigurationLoadingMethodGcsStagingCredentialHmacKeyArgs{
    						HmacKeyAccessId:      pulumi.String("string"),
    						HmacKeySecret:        pulumi.String("string"),
    						AdditionalProperties: pulumi.String("string"),
    						CredentialType:       pulumi.String("string"),
    					},
    				},
    				GcsBucketName:        pulumi.String("string"),
    				GcsBucketPath:        pulumi.String("string"),
    				AdditionalProperties: pulumi.String("string"),
    				KeepFilesInGcsBucket: pulumi.String("string"),
    				Method:               pulumi.String("string"),
    			},
    		},
    		RawDataDataset: pulumi.String("string"),
    	},
    	WorkspaceId:  pulumi.String("string"),
    	DefinitionId: pulumi.String("string"),
    	Name:         pulumi.String("string"),
    })
    
    var destinationBigqueryResource = new DestinationBigquery("destinationBigqueryResource", DestinationBigqueryArgs.builder()
        .configuration(DestinationBigqueryConfigurationArgs.builder()
            .datasetId("string")
            .datasetLocation("string")
            .projectId("string")
            .additionalProperties("string")
            .cdcDeletionMode("string")
            .credentialsJson("string")
            .disableTypeDedupe(false)
            .loadingMethod(DestinationBigqueryConfigurationLoadingMethodArgs.builder()
                .batchedStandardInserts(DestinationBigqueryConfigurationLoadingMethodBatchedStandardInsertsArgs.builder()
                    .additionalProperties("string")
                    .method("string")
                    .build())
                .gcsStaging(DestinationBigqueryConfigurationLoadingMethodGcsStagingArgs.builder()
                    .credential(DestinationBigqueryConfigurationLoadingMethodGcsStagingCredentialArgs.builder()
                        .hmacKey(DestinationBigqueryConfigurationLoadingMethodGcsStagingCredentialHmacKeyArgs.builder()
                            .hmacKeyAccessId("string")
                            .hmacKeySecret("string")
                            .additionalProperties("string")
                            .credentialType("string")
                            .build())
                        .build())
                    .gcsBucketName("string")
                    .gcsBucketPath("string")
                    .additionalProperties("string")
                    .keepFilesInGcsBucket("string")
                    .method("string")
                    .build())
                .build())
            .rawDataDataset("string")
            .build())
        .workspaceId("string")
        .definitionId("string")
        .name("string")
        .build());
    
    destination_bigquery_resource = airbyte.DestinationBigquery("destinationBigqueryResource",
        configuration={
            "dataset_id": "string",
            "dataset_location": "string",
            "project_id": "string",
            "additional_properties": "string",
            "cdc_deletion_mode": "string",
            "credentials_json": "string",
            "disable_type_dedupe": False,
            "loading_method": {
                "batched_standard_inserts": {
                    "additional_properties": "string",
                    "method": "string",
                },
                "gcs_staging": {
                    "credential": {
                        "hmac_key": {
                            "hmac_key_access_id": "string",
                            "hmac_key_secret": "string",
                            "additional_properties": "string",
                            "credential_type": "string",
                        },
                    },
                    "gcs_bucket_name": "string",
                    "gcs_bucket_path": "string",
                    "additional_properties": "string",
                    "keep_files_in_gcs_bucket": "string",
                    "method": "string",
                },
            },
            "raw_data_dataset": "string",
        },
        workspace_id="string",
        definition_id="string",
        name="string")
    
    const destinationBigqueryResource = new airbyte.DestinationBigquery("destinationBigqueryResource", {
        configuration: {
            datasetId: "string",
            datasetLocation: "string",
            projectId: "string",
            additionalProperties: "string",
            cdcDeletionMode: "string",
            credentialsJson: "string",
            disableTypeDedupe: false,
            loadingMethod: {
                batchedStandardInserts: {
                    additionalProperties: "string",
                    method: "string",
                },
                gcsStaging: {
                    credential: {
                        hmacKey: {
                            hmacKeyAccessId: "string",
                            hmacKeySecret: "string",
                            additionalProperties: "string",
                            credentialType: "string",
                        },
                    },
                    gcsBucketName: "string",
                    gcsBucketPath: "string",
                    additionalProperties: "string",
                    keepFilesInGcsBucket: "string",
                    method: "string",
                },
            },
            rawDataDataset: "string",
        },
        workspaceId: "string",
        definitionId: "string",
        name: "string",
    });
    
    type: airbyte:DestinationBigquery
    properties:
        configuration:
            additionalProperties: string
            cdcDeletionMode: string
            credentialsJson: string
            datasetId: string
            datasetLocation: string
            disableTypeDedupe: false
            loadingMethod:
                batchedStandardInserts:
                    additionalProperties: string
                    method: string
                gcsStaging:
                    additionalProperties: string
                    credential:
                        hmacKey:
                            additionalProperties: string
                            credentialType: string
                            hmacKeyAccessId: string
                            hmacKeySecret: string
                    gcsBucketName: string
                    gcsBucketPath: string
                    keepFilesInGcsBucket: string
                    method: string
            projectId: string
            rawDataDataset: string
        definitionId: string
        name: string
        workspaceId: string
    

    DestinationBigquery Resource Properties

    To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

    Inputs

    In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.

    The DestinationBigquery resource accepts the following input properties:

    Configuration DestinationBigqueryConfiguration
    The values required to configure the destination. The schema for this must match the schema returned by destinationdefinitionspecifications/get for the destinationDefinition.
    WorkspaceId string
    DefinitionId string
    The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Default: "22f6c74f-5699-40ff-833c-4a879ea40133"; Requires replacement if changed.
    Name string
    Name of the destination e.g. dev-mysql-instance.
    Configuration DestinationBigqueryConfigurationArgs
    The values required to configure the destination. The schema for this must match the schema returned by destinationdefinitionspecifications/get for the destinationDefinition.
    WorkspaceId string
    DefinitionId string
    The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Default: "22f6c74f-5699-40ff-833c-4a879ea40133"; Requires replacement if changed.
    Name string
    Name of the destination e.g. dev-mysql-instance.
    configuration DestinationBigqueryConfiguration
    The values required to configure the destination. The schema for this must match the schema returned by destinationdefinitionspecifications/get for the destinationDefinition.
    workspaceId String
    definitionId String
    The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Default: "22f6c74f-5699-40ff-833c-4a879ea40133"; Requires replacement if changed.
    name String
    Name of the destination e.g. dev-mysql-instance.
    configuration DestinationBigqueryConfiguration
    The values required to configure the destination. The schema for this must match the schema returned by destinationdefinitionspecifications/get for the destinationDefinition.
    workspaceId string
    definitionId string
    The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Default: "22f6c74f-5699-40ff-833c-4a879ea40133"; Requires replacement if changed.
    name string
    Name of the destination e.g. dev-mysql-instance.
    configuration DestinationBigqueryConfigurationArgs
    The values required to configure the destination. The schema for this must match the schema returned by destinationdefinitionspecifications/get for the destinationDefinition.
    workspace_id str
    definition_id str
    The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Default: "22f6c74f-5699-40ff-833c-4a879ea40133"; Requires replacement if changed.
    name str
    Name of the destination e.g. dev-mysql-instance.
    configuration Property Map
    The values required to configure the destination. The schema for this must match the schema returned by destinationdefinitionspecifications/get for the destinationDefinition.
    workspaceId String
    definitionId String
    The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Default: "22f6c74f-5699-40ff-833c-4a879ea40133"; Requires replacement if changed.
    name String
    Name of the destination e.g. dev-mysql-instance.
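
    A minimal sketch in TypeScript, assuming configuration and workspaceId are the only required inputs (matching their ordering above); all values below are placeholders.

    import * as airbyte from "@pulumi/airbyte";
    
    // Minimal sketch: only the inputs listed as required above.
    // All values are placeholders, including the workspace UUID.
    const minimalBigquery = new airbyte.DestinationBigquery("minimal_bigquery", {
        configuration: {
            projectId: "my-gcp-project",  // GCP project containing the target dataset
            datasetId: "analytics",       // default dataset for replicated tables
            datasetLocation: "US",        // one of the locations listed under datasetLocation
        },
        workspaceId: "00000000-0000-0000-0000-000000000000", // placeholder workspace UUID
    });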

    Outputs

    All input properties are implicitly available as output properties. Additionally, the DestinationBigquery resource produces the following output properties:

    CreatedAt double
    DestinationId string
    DestinationType string
    Id string
    The provider-assigned unique ID for this managed resource.
    ResourceAllocation DestinationBigqueryResourceAllocation
    Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition; they are overridden by the job type specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
    CreatedAt float64
    DestinationId string
    DestinationType string
    Id string
    The provider-assigned unique ID for this managed resource.
    ResourceAllocation DestinationBigqueryResourceAllocation
    Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition; they are overridden by the job type specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
    createdAt Double
    destinationId String
    destinationType String
    id String
    The provider-assigned unique ID for this managed resource.
    resourceAllocation DestinationBigqueryResourceAllocation
    Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition; they are overridden by the job type specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
    createdAt number
    destinationId string
    destinationType string
    id string
    The provider-assigned unique ID for this managed resource.
    resourceAllocation DestinationBigqueryResourceAllocation
    Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition; they are overridden by the job type specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
    created_at float
    destination_id str
    destination_type str
    id str
    The provider-assigned unique ID for this managed resource.
    resource_allocation DestinationBigqueryResourceAllocation
    Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition; they are overridden by the job type specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
    createdAt Number
    destinationId String
    destinationType String
    id String
    The provider-assigned unique ID for this managed resource.
    resourceAllocation Property Map
    Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition; they are overridden by the job type specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
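
    These outputs can be exported from the stack or wired into other resources. A short TypeScript sketch, reusing the myDestinationBigquery resource from the Example Usage above:

    // Export the provider-populated outputs for use outside the stack.
    export const bigqueryDestinationId = myDestinationBigquery.destinationId;
    export const bigqueryDestinationCreatedAt = myDestinationBigquery.createdAt;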

    Look up Existing DestinationBigquery Resource

    Get an existing DestinationBigquery resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

    public static get(name: string, id: Input<ID>, state?: DestinationBigqueryState, opts?: CustomResourceOptions): DestinationBigquery
    @staticmethod
    def get(resource_name: str,
            id: str,
            opts: Optional[ResourceOptions] = None,
            configuration: Optional[DestinationBigqueryConfigurationArgs] = None,
            created_at: Optional[float] = None,
            definition_id: Optional[str] = None,
            destination_id: Optional[str] = None,
            destination_type: Optional[str] = None,
            name: Optional[str] = None,
            resource_allocation: Optional[DestinationBigqueryResourceAllocationArgs] = None,
            workspace_id: Optional[str] = None) -> DestinationBigquery
    func GetDestinationBigquery(ctx *Context, name string, id IDInput, state *DestinationBigqueryState, opts ...ResourceOption) (*DestinationBigquery, error)
    public static DestinationBigquery Get(string name, Input<string> id, DestinationBigqueryState? state, CustomResourceOptions? opts = null)
    public static DestinationBigquery get(String name, Output<String> id, DestinationBigqueryState state, CustomResourceOptions options)
    resources:
      _:
        type: airbyte:DestinationBigquery
        get:
          id: ${id}
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    resource_name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    The following state arguments are supported:
    Configuration DestinationBigqueryConfiguration
    The values required to configure the destination. The schema for this must match the schema returned by destinationdefinitionspecifications/get for the destinationDefinition.
    CreatedAt double
    DefinitionId string
    The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Default: "22f6c74f-5699-40ff-833c-4a879ea40133"; Requires replacement if changed.
    DestinationId string
    DestinationType string
    Name string
    Name of the destination e.g. dev-mysql-instance.
    ResourceAllocation DestinationBigqueryResourceAllocation
    Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition; they are overridden by the job type specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
    WorkspaceId string
    Configuration DestinationBigqueryConfigurationArgs
    The values required to configure the destination. The schema for this must match the schema returned by destinationdefinitionspecifications/get for the destinationDefinition.
    CreatedAt float64
    DefinitionId string
    The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Default: "22f6c74f-5699-40ff-833c-4a879ea40133"; Requires replacement if changed.
    DestinationId string
    DestinationType string
    Name string
    Name of the destination e.g. dev-mysql-instance.
    ResourceAllocation DestinationBigqueryResourceAllocationArgs
    Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition; they are overridden by the job type specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
    WorkspaceId string
    configuration DestinationBigqueryConfiguration
    The values required to configure the destination. The schema for this must match the schema returned by destinationdefinitionspecifications/get for the destinationDefinition.
    createdAt Double
    definitionId String
    The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Default: "22f6c74f-5699-40ff-833c-4a879ea40133"; Requires replacement if changed.
    destinationId String
    destinationType String
    name String
    Name of the destination e.g. dev-mysql-instance.
    resourceAllocation DestinationBigqueryResourceAllocation
    Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition; they are overridden by the job type specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
    workspaceId String
    configuration DestinationBigqueryConfiguration
    The values required to configure the destination. The schema for this must match the schema returned by destinationdefinitionspecifications/get for the destinationDefinition.
    createdAt number
    definitionId string
    The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Default: "22f6c74f-5699-40ff-833c-4a879ea40133"; Requires replacement if changed.
    destinationId string
    destinationType string
    name string
    Name of the destination e.g. dev-mysql-instance.
    resourceAllocation DestinationBigqueryResourceAllocation
    Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition; they are overridden by the job type specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
    workspaceId string
    configuration DestinationBigqueryConfigurationArgs
    The values required to configure the destination. The schema for this must match the schema returned by destinationdefinitionspecifications/get for the destinationDefinition.
    created_at float
    definition_id str
    The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Default: "22f6c74f-5699-40ff-833c-4a879ea40133"; Requires replacement if changed.
    destination_id str
    destination_type str
    name str
    Name of the destination e.g. dev-mysql-instance.
    resource_allocation DestinationBigqueryResourceAllocationArgs
    Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition; they are overridden by the job type specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
    workspace_id str
    configuration Property Map
    The values required to configure the destination. The schema for this must match the schema returned by destinationdefinitionspecifications/get for the destinationDefinition.
    createdAt Number
    definitionId String
    The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Default: "22f6c74f-5699-40ff-833c-4a879ea40133"; Requires replacement if changed.
    destinationId String
    destinationType String
    name String
    Name of the destination e.g. dev-mysql-instance.
    resourceAllocation Property Map
    Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition; they are overridden by the job type specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
    workspaceId String
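
    For example, a TypeScript sketch of the lookup above; the ID value is a placeholder for a real destination ID:

    import * as airbyte from "@pulumi/airbyte";
    
    // Look up an existing destination by its provider-assigned ID.
    // No state arguments are passed, so only name and id qualify the lookup.
    const existing = airbyte.DestinationBigquery.get(
        "existing_bigquery_destination",
        "00000000-0000-0000-0000-000000000000", // placeholder destination ID
    );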

    Supporting Types

    DestinationBigqueryConfiguration, DestinationBigqueryConfigurationArgs

    DatasetId string
    The default BigQuery Dataset ID that tables are replicated to if the source does not specify a namespace. Read more here.
    DatasetLocation string
    The location of the dataset. Warning: Changes made after creation will not be applied. Read more here. Must be one of ["EU", "US", "africa-south1", "asia-east1", "asia-east2", "asia-northeast1", "asia-northeast2", "asia-northeast3", "asia-south1", "asia-south2", "asia-southeast1", "asia-southeast2", "australia-southeast1", "australia-southeast2", "europe-central2", "europe-north1", "europe-north2", "europe-southwest1", "europe-west1", "europe-west2", "europe-west3", "europe-west4", "europe-west6", "europe-west8", "europe-west9", "europe-west10", "europe-west12", "me-central1", "me-central2", "me-west1", "northamerica-northeast1", "northamerica-northeast2", "northamerica-south1", "southamerica-east1", "southamerica-west1", "us-central1", "us-east1", "us-east4", "us-east5", "us-south1", "us-west1", "us-west2", "us-west3", "us-west4"]
    ProjectId string
    The GCP project ID for the project containing the target BigQuery dataset. Read more here.
    AdditionalProperties string
    Parsed as JSON.
    CdcDeletionMode string
    Whether to execute CDC deletions as hard deletes (i.e. propagate source deletions to the destination) or soft deletes (i.e. leave a tombstone record in the destination). Defaults to hard deletes. Default: "Hard delete"; must be one of ["Hard delete", "Soft delete"]
    CredentialsJson string
    The contents of the JSON service account key. Check out the docs if you need help generating this key. Default credentials will be used if this field is left empty.
    DisableTypeDedupe bool
    Write the legacy "raw tables" format to enable backwards compatibility with older versions of this connector. Default: false
    LoadingMethod DestinationBigqueryConfigurationLoadingMethod
    The way data will be uploaded to BigQuery.
    RawDataDataset string
    Airbyte will use this dataset for various internal tables. In legacy raw tables mode, the raw tables will be stored in this dataset. Defaults to "airbyte_internal".
    DatasetId string
    The default BigQuery Dataset ID that tables are replicated to if the source does not specify a namespace. Read more here.
    DatasetLocation string
    The location of the dataset. Warning: Changes made after creation will not be applied. Read more here. Must be one of ["EU", "US", "africa-south1", "asia-east1", "asia-east2", "asia-northeast1", "asia-northeast2", "asia-northeast3", "asia-south1", "asia-south2", "asia-southeast1", "asia-southeast2", "australia-southeast1", "australia-southeast2", "europe-central2", "europe-north1", "europe-north2", "europe-southwest1", "europe-west1", "europe-west2", "europe-west3", "europe-west4", "europe-west6", "europe-west8", "europe-west9", "europe-west10", "europe-west12", "me-central1", "me-central2", "me-west1", "northamerica-northeast1", "northamerica-northeast2", "northamerica-south1", "southamerica-east1", "southamerica-west1", "us-central1", "us-east1", "us-east4", "us-east5", "us-south1", "us-west1", "us-west2", "us-west3", "us-west4"]
    ProjectId string
    The GCP project ID for the project containing the target BigQuery dataset. Read more here.
    AdditionalProperties string
    Parsed as JSON.
    CdcDeletionMode string
    Whether to execute CDC deletions as hard deletes (i.e. propagate source deletions to the destination) or soft deletes (i.e. leave a tombstone record in the destination). Defaults to hard deletes. Default: "Hard delete"; must be one of ["Hard delete", "Soft delete"]
    CredentialsJson string
    The contents of the JSON service account key. Check out the docs if you need help generating this key. Default credentials will be used if this field is left empty.
    DisableTypeDedupe bool
    Write the legacy "raw tables" format to enable backwards compatibility with older versions of this connector. Default: false
    LoadingMethod DestinationBigqueryConfigurationLoadingMethod
    The way data will be uploaded to BigQuery.
    RawDataDataset string
    Airbyte will use this dataset for various internal tables. In legacy raw tables mode, the raw tables will be stored in this dataset. Defaults to "airbyte_internal".
    datasetId String
    The default BigQuery Dataset ID that tables are replicated to if the source does not specify a namespace. Read more here.
    datasetLocation String
    The location of the dataset. Warning: Changes made after creation will not be applied. Read more here. Must be one of ["EU", "US", "africa-south1", "asia-east1", "asia-east2", "asia-northeast1", "asia-northeast2", "asia-northeast3", "asia-south1", "asia-south2", "asia-southeast1", "asia-southeast2", "australia-southeast1", "australia-southeast2", "europe-central2", "europe-north1", "europe-north2", "europe-southwest1", "europe-west1", "europe-west2", "europe-west3", "europe-west4", "europe-west6", "europe-west8", "europe-west9", "europe-west10", "europe-west12", "me-central1", "me-central2", "me-west1", "northamerica-northeast1", "northamerica-northeast2", "northamerica-south1", "southamerica-east1", "southamerica-west1", "us-central1", "us-east1", "us-east4", "us-east5", "us-south1", "us-west1", "us-west2", "us-west3", "us-west4"]
    projectId String
    The GCP project ID for the project containing the target BigQuery dataset. Read more here.
    additionalProperties String
    Parsed as JSON.
    cdcDeletionMode String
    Whether to execute CDC deletions as hard deletes (i.e. propagate source deletions to the destination) or soft deletes (i.e. leave a tombstone record in the destination). Defaults to hard deletes. Default: "Hard delete"; must be one of ["Hard delete", "Soft delete"]
    credentialsJson String
    The contents of the JSON service account key. Check out the docs if you need help generating this key. Default credentials will be used if this field is left empty.
    disableTypeDedupe Boolean
    Write the legacy "raw tables" format to enable backwards compatibility with older versions of this connector. Default: false
    loadingMethod DestinationBigqueryConfigurationLoadingMethod
    The way data will be uploaded to BigQuery.
    rawDataDataset String
    Airbyte will use this dataset for various internal tables. In legacy raw tables mode, the raw tables will be stored in this dataset. Defaults to "airbyte_internal".
    datasetId string
    The default BigQuery Dataset ID that tables are replicated to if the source does not specify a namespace. Read more here.
    datasetLocation string
    The location of the dataset. Warning: Changes made after creation will not be applied. Read more here. Must be one of ["EU", "US", "africa-south1", "asia-east1", "asia-east2", "asia-northeast1", "asia-northeast2", "asia-northeast3", "asia-south1", "asia-south2", "asia-southeast1", "asia-southeast2", "australia-southeast1", "australia-southeast2", "europe-central2", "europe-north1", "europe-north2", "europe-southwest1", "europe-west1", "europe-west2", "europe-west3", "europe-west4", "europe-west6", "europe-west8", "europe-west9", "europe-west10", "europe-west12", "me-central1", "me-central2", "me-west1", "northamerica-northeast1", "northamerica-northeast2", "northamerica-south1", "southamerica-east1", "southamerica-west1", "us-central1", "us-east1", "us-east4", "us-east5", "us-south1", "us-west1", "us-west2", "us-west3", "us-west4"]
    projectId string
    The GCP project ID for the project containing the target BigQuery dataset. Read more here.
    additionalProperties string
    Parsed as JSON.
    cdcDeletionMode string
    Whether to execute CDC deletions as hard deletes (i.e. propagate source deletions to the destination) or soft deletes (i.e. leave a tombstone record in the destination). Defaults to hard deletes. Default: "Hard delete"; must be one of ["Hard delete", "Soft delete"]
    credentialsJson string
    The contents of the JSON service account key. Check out the docs if you need help generating this key. Default credentials will be used if this field is left empty.
    disableTypeDedupe boolean
    Write the legacy "raw tables" format to enable backwards compatibility with older versions of this connector. Default: false
    loadingMethod DestinationBigqueryConfigurationLoadingMethod
    The way data will be uploaded to BigQuery.
    rawDataDataset string
    Airbyte will use this dataset for various internal tables. In legacy raw tables mode, the raw tables will be stored in this dataset. Defaults to "airbyte_internal".
    dataset_id str
    The default BigQuery Dataset ID that tables are replicated to if the source does not specify a namespace. Read more here.
    dataset_location str
    The location of the dataset. Warning: Changes made after creation will not be applied. Read more here. Must be one of ["EU", "US", "africa-south1", "asia-east1", "asia-east2", "asia-northeast1", "asia-northeast2", "asia-northeast3", "asia-south1", "asia-south2", "asia-southeast1", "asia-southeast2", "australia-southeast1", "australia-southeast2", "europe-central2", "europe-north1", "europe-north2", "europe-southwest1", "europe-west1", "europe-west2", "europe-west3", "europe-west4", "europe-west6", "europe-west8", "europe-west9", "europe-west10", "europe-west12", "me-central1", "me-central2", "me-west1", "northamerica-northeast1", "northamerica-northeast2", "northamerica-south1", "southamerica-east1", "southamerica-west1", "us-central1", "us-east1", "us-east4", "us-east5", "us-south1", "us-west1", "us-west2", "us-west3", "us-west4"]
    project_id str
    The GCP project ID for the project containing the target BigQuery dataset. Read more here.
    additional_properties str
    Parsed as JSON.
    cdc_deletion_mode str
    Whether to execute CDC deletions as hard deletes (i.e. propagate source deletions to the destination) or soft deletes (i.e. leave a tombstone record in the destination). Defaults to hard deletes. Default: "Hard delete"; must be one of ["Hard delete", "Soft delete"]
    credentials_json str
    The contents of the JSON service account key. Check out the docs if you need help generating this key. Default credentials will be used if this field is left empty.
    disable_type_dedupe bool
    Write the legacy "raw tables" format to enable backwards compatibility with older versions of this connector. Default: false
    loading_method DestinationBigqueryConfigurationLoadingMethod
    The way data will be uploaded to BigQuery.
    raw_data_dataset str
    Airbyte will use this dataset for various internal tables. In legacy raw tables mode, the raw tables will be stored in this dataset. Defaults to "airbyte_internal".
    datasetId String
    The default BigQuery Dataset ID that tables are replicated to if the source does not specify a namespace. Read more here.
    datasetLocation String
    The location of the dataset. Warning: Changes made after creation will not be applied. Read more here. Must be one of ["EU", "US", "africa-south1", "asia-east1", "asia-east2", "asia-northeast1", "asia-northeast2", "asia-northeast3", "asia-south1", "asia-south2", "asia-southeast1", "asia-southeast2", "australia-southeast1", "australia-southeast2", "europe-central2", "europe-north1", "europe-north2", "europe-southwest1", "europe-west1", "europe-west2", "europe-west3", "europe-west4", "europe-west6", "europe-west8", "europe-west9", "europe-west10", "europe-west12", "me-central1", "me-central2", "me-west1", "northamerica-northeast1", "northamerica-northeast2", "northamerica-south1", "southamerica-east1", "southamerica-west1", "us-central1", "us-east1", "us-east4", "us-east5", "us-south1", "us-west1", "us-west2", "us-west3", "us-west4"]
    projectId String
    The GCP project ID for the project containing the target BigQuery dataset. Read more here.
    additionalProperties String
    Parsed as JSON.
    cdcDeletionMode String
    Whether to execute CDC deletions as hard deletes (i.e. propagate source deletions to the destination) or soft deletes (i.e. leave a tombstone record in the destination). Defaults to hard deletes. Default: "Hard delete"; must be one of ["Hard delete", "Soft delete"]
    credentialsJson String
    The contents of the JSON service account key. Check out the docs if you need help generating this key. Default credentials will be used if this field is left empty.
    disableTypeDedupe Boolean
    Write the legacy "raw tables" format to enable backwards compatibility with older versions of this connector. Default: false
    loadingMethod Property Map
    The way data will be uploaded to BigQuery.
    rawDataDataset String
    Airbyte will use this dataset for various internal tables. In legacy raw tables mode, the raw tables will be stored in this dataset. Defaults to "airbyte_internal".
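
    Because credentialsJson holds a service account key, it is worth passing it through Pulumi's secret configuration rather than inlining it. A TypeScript sketch; the config key gcpCredentialsJson is an assumption, not a provider requirement:

    import * as pulumi from "@pulumi/pulumi";
    import * as airbyte from "@pulumi/airbyte";
    
    const cfg = new pulumi.Config();
    // Assumed config key, stored encrypted via:
    //   pulumi config set --secret gcpCredentialsJson "$(cat key.json)"
    const credentialsJson = cfg.requireSecret("gcpCredentialsJson");
    
    const secureBigquery = new airbyte.DestinationBigquery("secure_bigquery", {
        configuration: {
            projectId: "my-gcp-project",      // placeholder project
            datasetId: "analytics",           // placeholder dataset
            datasetLocation: "US",
            credentialsJson: credentialsJson, // kept encrypted in state as a secret
        },
        workspaceId: "00000000-0000-0000-0000-000000000000", // placeholder workspace UUID
    });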

    DestinationBigqueryConfigurationLoadingMethod, DestinationBigqueryConfigurationLoadingMethodArgs

    BatchedStandardInserts DestinationBigqueryConfigurationLoadingMethodBatchedStandardInserts
    Direct loading using batched SQL INSERT statements. This method uses the BigQuery driver to convert large INSERT statements into file uploads automatically.
    GcsStaging DestinationBigqueryConfigurationLoadingMethodGcsStaging
    Writes large batches of records to a file, uploads the file to GCS, then uses COPY INTO to load your data into BigQuery.
    BatchedStandardInserts DestinationBigqueryConfigurationLoadingMethodBatchedStandardInserts
    Direct loading using batched SQL INSERT statements. This method uses the BigQuery driver to convert large INSERT statements into file uploads automatically.
    GcsStaging DestinationBigqueryConfigurationLoadingMethodGcsStaging
    Writes large batches of records to a file, uploads the file to GCS, then uses COPY INTO to load your data into BigQuery.
    batchedStandardInserts DestinationBigqueryConfigurationLoadingMethodBatchedStandardInserts
    Direct loading using batched SQL INSERT statements. This method uses the BigQuery driver to convert large INSERT statements into file uploads automatically.
    gcsStaging DestinationBigqueryConfigurationLoadingMethodGcsStaging
    Writes large batches of records to a file, uploads the file to GCS, then uses COPY INTO to load your data into BigQuery.
    batchedStandardInserts DestinationBigqueryConfigurationLoadingMethodBatchedStandardInserts
    Direct loading using batched SQL INSERT statements. This method uses the BigQuery driver to convert large INSERT statements into file uploads automatically.
    gcsStaging DestinationBigqueryConfigurationLoadingMethodGcsStaging
    Writes large batches of records to a file, uploads the file to GCS, then uses COPY INTO to load your data into BigQuery.
    batched_standard_inserts DestinationBigqueryConfigurationLoadingMethodBatchedStandardInserts
    Direct loading using batched SQL INSERT statements. This method uses the BigQuery driver to convert large INSERT statements into file uploads automatically.
    gcs_staging DestinationBigqueryConfigurationLoadingMethodGcsStaging
    Writes large batches of records to a file, uploads the file to GCS, then uses COPY INTO to load your data into BigQuery.
    batchedStandardInserts Property Map
    Direct loading using batched SQL INSERT statements. This method uses the BigQuery driver to convert large INSERT statements into file uploads automatically.
    gcsStaging Property Map
    Writes large batches of records to a file, uploads the file to GCS, then uses COPY INTO to load your data into BigQuery.

    DestinationBigqueryConfigurationLoadingMethodBatchedStandardInserts, DestinationBigqueryConfigurationLoadingMethodBatchedStandardInsertsArgs

    AdditionalProperties string
    Parsed as JSON.
    Method string
    Default: "Standard"; must be "Standard"
    AdditionalProperties string
    Parsed as JSON.
    Method string
    Default: "Standard"; must be "Standard"
    additionalProperties String
    Parsed as JSON.
    method String
    Default: "Standard"; must be "Standard"
    additionalProperties string
    Parsed as JSON.
    method string
    Default: "Standard"; must be "Standard"
    additional_properties str
    Parsed as JSON.
    method str
    Default: "Standard"; must be "Standard"
    additionalProperties String
    Parsed as JSON.
    method String
    Default: "Standard"; must be "Standard"

    DestinationBigqueryConfigurationLoadingMethodGcsStaging, DestinationBigqueryConfigurationLoadingMethodGcsStagingArgs

    Credential DestinationBigqueryConfigurationLoadingMethodGcsStagingCredential
    An HMAC key is a type of credential and can be associated with a service account or a user account in Cloud Storage. Read more here.
    GcsBucketName string
    The name of the GCS bucket. Read more here.
    GcsBucketPath string
    Directory under the GCS bucket where data will be written.
    AdditionalProperties string
    Parsed as JSON.
    KeepFilesInGcsBucket string
    This upload method temporarily stores records in the GCS bucket. This setting lets you choose whether those records are removed from GCS once the migration has finished. Default: "Delete all tmp files from GCS"; must be one of ["Delete all tmp files from GCS", "Keep all tmp files in GCS"]
    Method string
    Default: "GCS Staging"; must be "GCS Staging"
    Credential DestinationBigqueryConfigurationLoadingMethodGcsStagingCredential
    An HMAC key is a type of credential and can be associated with a service account or a user account in Cloud Storage. Read more here.
    GcsBucketName string
    The name of the GCS bucket. Read more here.
    GcsBucketPath string
    Directory under the GCS bucket where data will be written.
    AdditionalProperties string
    Parsed as JSON.
    KeepFilesInGcsBucket string
    This upload method temporarily stores records in the GCS bucket. This setting lets you choose whether those records are removed from GCS once the migration has finished. Default: "Delete all tmp files from GCS"; must be one of ["Delete all tmp files from GCS", "Keep all tmp files in GCS"]
    Method string
    Default: "GCS Staging"; must be "GCS Staging"
    credential DestinationBigqueryConfigurationLoadingMethodGcsStagingCredential
    An HMAC key is a type of credential and can be associated with a service account or a user account in Cloud Storage. Read more here.
    gcsBucketName String
    The name of the GCS bucket. Read more here.
    gcsBucketPath String
    Directory under the GCS bucket where data will be written.
    additionalProperties String
    Parsed as JSON.
    keepFilesInGcsBucket String
    This upload method temporarily stores records in the GCS bucket. This setting lets you choose whether those records are removed from GCS once the migration has finished. Default: "Delete all tmp files from GCS"; must be one of ["Delete all tmp files from GCS", "Keep all tmp files in GCS"]
    method String
    Default: "GCS Staging"; must be "GCS Staging"
    credential DestinationBigqueryConfigurationLoadingMethodGcsStagingCredential
    An HMAC key is a type of credential and can be associated with a service account or a user account in Cloud Storage. Read more in the Google Cloud Storage documentation.
    gcsBucketName string
    The name of the GCS bucket. Read more in the Google Cloud Storage documentation.
    gcsBucketPath string
    Directory under the GCS bucket where data will be written.
    additionalProperties string
    Parsed as JSON.
    keepFilesInGcsBucket string
    This upload method temporarily stores records in the GCS bucket; this option lets you choose whether those records are removed from GCS once the migration has finished. Default: "Delete all tmp files from GCS"; must be one of ["Delete all tmp files from GCS", "Keep all tmp files in GCS"]
    method string
    Default: "GCS Staging"; must be "GCS Staging"
    credential DestinationBigqueryConfigurationLoadingMethodGcsStagingCredential
    An HMAC key is a type of credential and can be associated with a service account or a user account in Cloud Storage. Read more in the Google Cloud Storage documentation.
    gcs_bucket_name str
    The name of the GCS bucket. Read more in the Google Cloud Storage documentation.
    gcs_bucket_path str
    Directory under the GCS bucket where data will be written.
    additional_properties str
    Parsed as JSON.
    keep_files_in_gcs_bucket str
    This upload method temporarily stores records in the GCS bucket; this option lets you choose whether those records are removed from GCS once the migration has finished. Default: "Delete all tmp files from GCS"; must be one of ["Delete all tmp files from GCS", "Keep all tmp files in GCS"]
    method str
    Default: "GCS Staging"; must be "GCS Staging"
    credential Property Map
    An HMAC key is a type of credential and can be associated with a service account or a user account in Cloud Storage. Read more in the Google Cloud Storage documentation.
    gcsBucketName String
    The name of the GCS bucket. Read more in the Google Cloud Storage documentation.
    gcsBucketPath String
    Directory under the GCS bucket where data will be written.
    additionalProperties String
    Parsed as JSON.
    keepFilesInGcsBucket String
    This upload method temporarily stores records in the GCS bucket; this option lets you choose whether those records are removed from GCS once the migration has finished. Default: "Delete all tmp files from GCS"; must be one of ["Delete all tmp files from GCS", "Keep all tmp files in GCS"]
    method String
    Default: "GCS Staging"; must be "GCS Staging"

    DestinationBigqueryConfigurationLoadingMethodGcsStagingCredential, DestinationBigqueryConfigurationLoadingMethodGcsStagingCredentialArgs

    DestinationBigqueryConfigurationLoadingMethodGcsStagingCredentialHmacKey, DestinationBigqueryConfigurationLoadingMethodGcsStagingCredentialHmacKeyArgs

    HmacKeyAccessId string
    HMAC key access ID. When linked to a service account, this ID is 61 characters long; when linked to a user account, it is 24 characters long.
    HmacKeySecret string
    The corresponding secret for the access ID. It is a 40-character base-64 encoded string.
    AdditionalProperties string
    Parsed as JSON.
    CredentialType string
    Default: "HMACKEY"; must be "HMACKEY"
    HmacKeyAccessId string
    HMAC key access ID. When linked to a service account, this ID is 61 characters long; when linked to a user account, it is 24 characters long.
    HmacKeySecret string
    The corresponding secret for the access ID. It is a 40-character base-64 encoded string.
    AdditionalProperties string
    Parsed as JSON.
    CredentialType string
    Default: "HMACKEY"; must be "HMACKEY"
    hmacKeyAccessId String
    HMAC key access ID. When linked to a service account, this ID is 61 characters long; when linked to a user account, it is 24 characters long.
    hmacKeySecret String
    The corresponding secret for the access ID. It is a 40-character base-64 encoded string.
    additionalProperties String
    Parsed as JSON.
    credentialType String
    Default: "HMACKEY"; must be "HMACKEY"
    hmacKeyAccessId string
    HMAC key access ID. When linked to a service account, this ID is 61 characters long; when linked to a user account, it is 24 characters long.
    hmacKeySecret string
    The corresponding secret for the access ID. It is a 40-character base-64 encoded string.
    additionalProperties string
    Parsed as JSON.
    credentialType string
    Default: "HMACKEY"; must be "HMACKEY"
    hmac_key_access_id str
    HMAC key access ID. When linked to a service account, this ID is 61 characters long; when linked to a user account, it is 24 characters long.
    hmac_key_secret str
    The corresponding secret for the access ID. It is a 40-character base-64 encoded string.
    additional_properties str
    Parsed as JSON.
    credential_type str
    Default: "HMACKEY"; must be "HMACKEY"
    hmacKeyAccessId String
    HMAC key access ID. When linked to a service account, this ID is 61 characters long; when linked to a user account, it is 24 characters long.
    hmacKeySecret String
    The corresponding secret for the access ID. It is a 40-character base-64 encoded string.
    additionalProperties String
    Parsed as JSON.
    credentialType String
    Default: "HMACKEY"; must be "HMACKEY"

    DestinationBigqueryResourceAllocation, DestinationBigqueryResourceAllocationArgs

    Default DestinationBigqueryResourceAllocationDefault
    Optional resource requirements to run workers (blank for unbounded allocations).
    JobSpecifics List<DestinationBigqueryResourceAllocationJobSpecific>
    Default DestinationBigqueryResourceAllocationDefault
    Optional resource requirements to run workers (blank for unbounded allocations).
    JobSpecifics []DestinationBigqueryResourceAllocationJobSpecific
    default_ DestinationBigqueryResourceAllocationDefault
    Optional resource requirements to run workers (blank for unbounded allocations).
    jobSpecifics List<DestinationBigqueryResourceAllocationJobSpecific>
    default DestinationBigqueryResourceAllocationDefault
    Optional resource requirements to run workers (blank for unbounded allocations).
    jobSpecifics DestinationBigqueryResourceAllocationJobSpecific[]
    default Property Map
    Optional resource requirements to run workers (blank for unbounded allocations).
    jobSpecifics List<Property Map>
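
    As a hedged illustration, assuming the provider surfaces resourceAllocation as a computed output on the resource (as is typical for bridged Terraform attributes), the default allocation can be exported for inspection:

    // `myDestinationBigquery` is the resource from the Example Usage above.
    // Optional chaining guards against an unset allocation.
    export const defaultWorkerResources =
        myDestinationBigquery.resourceAllocation.apply(alloc => alloc?.default);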

    DestinationBigqueryResourceAllocationDefault, DestinationBigqueryResourceAllocationDefaultArgs

    DestinationBigqueryResourceAllocationJobSpecific, DestinationBigqueryResourceAllocationJobSpecificArgs

    JobType string
    Enum that describes the different types of jobs that the platform runs.
    ResourceRequirements DestinationBigqueryResourceAllocationJobSpecificResourceRequirements
    Optional resource requirements to run workers (blank for unbounded allocations).
    JobType string
    Enum that describes the different types of jobs that the platform runs.
    ResourceRequirements DestinationBigqueryResourceAllocationJobSpecificResourceRequirements
    Optional resource requirements to run workers (blank for unbounded allocations).
    jobType String
    Enum that describes the different types of jobs that the platform runs.
    resourceRequirements DestinationBigqueryResourceAllocationJobSpecificResourceRequirements
    Optional resource requirements to run workers (blank for unbounded allocations).
    jobType string
    Enum that describes the different types of jobs that the platform runs.
    resourceRequirements DestinationBigqueryResourceAllocationJobSpecificResourceRequirements
    Optional resource requirements to run workers (blank for unbounded allocations).
    job_type str
    Enum that describes the different types of jobs that the platform runs.
    resource_requirements DestinationBigqueryResourceAllocationJobSpecificResourceRequirements
    Optional resource requirements to run workers (blank for unbounded allocations).
    jobType String
    Enum that describes the different types of jobs that the platform runs.
    resourceRequirements Property Map
    Optional resource requirements to run workers (blank for unbounded allocations).

    DestinationBigqueryResourceAllocationJobSpecificResourceRequirements, DestinationBigqueryResourceAllocationJobSpecificResourceRequirementsArgs

    Import

    In Terraform v1.5.0 and later, the import block can be used with the id attribute, for example:

    import {
      to = airbyte_destination_bigquery.my_airbyte_destination_bigquery
      id = "..."
    }

    The pulumi import command can be used, for example:

    $ pulumi import airbyte:index/destinationBigquery:DestinationBigquery my_airbyte_destination_bigquery "..."
    

    To learn more about importing existing cloud resources, see Importing resources.

    Package Details

    Repository
    airbyte airbytehq/terraform-provider-airbyte
    License
    Notes
    This Pulumi package is based on the airbyte Terraform Provider.