1. Packages
  2. Google Cloud (GCP) Classic
  3. API Docs
  4. bigquery
  5. getTable
Google Cloud v8.34.0 published on Wednesday, Jun 11, 2025 by Pulumi

gcp.bigquery.getTable

Explore with Pulumi AI

gcp logo
Google Cloud v8.34.0 published on Wednesday, Jun 11, 2025 by Pulumi

    Get a specific table in a BigQuery dataset. For more information see the official documentation and API.

    Example Usage

    import * as pulumi from "@pulumi/pulumi";
    import * as gcp from "@pulumi/gcp";
    
    const table = gcp.bigquery.getTable({
        project: "my-project",
        datasetId: "my-bq-dataset",
        tableId: "my-table",
    });
    
    import pulumi
    import pulumi_gcp as gcp
    
    table = gcp.bigquery.get_table(project="my-project",
        dataset_id="my-bq-dataset",
        table_id="my-table")
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/bigquery"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		_, err := bigquery.LookupTable(ctx, &bigquery.LookupTableArgs{
    			Project:   pulumi.StringRef("my-project"),
    			DatasetId: "my-bq-dataset",
    			TableId:   "my-table",
    		}, nil)
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Gcp = Pulumi.Gcp;
    
    return await Deployment.RunAsync(() => 
    {
        var table = Gcp.BigQuery.GetTable.Invoke(new()
        {
            Project = "my-project",
            DatasetId = "my-bq-dataset",
            TableId = "my-table",
        });
    
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.gcp.bigquery.BigqueryFunctions;
    import com.pulumi.gcp.bigquery.inputs.GetTableArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            final var table = BigqueryFunctions.getTable(GetTableArgs.builder()
                .project("my-project")
                .datasetId("my-bq-dataset")
                .tableId("my-table")
                .build());
    
        }
    }
    
    variables:
      table:
        fn::invoke:
          function: gcp:bigquery:getTable
          arguments:
            project: my-project
            datasetId: my-bq-dataset
            tableId: my-table
    

    Using getTable

    Two invocation forms are available. The direct form accepts plain arguments and either blocks until the result value is available, or returns a Promise-wrapped result. The output form accepts Input-wrapped arguments and returns an Output-wrapped result.

    function getTable(args: GetTableArgs, opts?: InvokeOptions): Promise<GetTableResult>
    function getTableOutput(args: GetTableOutputArgs, opts?: InvokeOptions): Output<GetTableResult>
    def get_table(dataset_id: Optional[str] = None,
                  project: Optional[str] = None,
                  table_id: Optional[str] = None,
                  opts: Optional[InvokeOptions] = None) -> GetTableResult
    def get_table_output(dataset_id: Optional[pulumi.Input[str]] = None,
                  project: Optional[pulumi.Input[str]] = None,
                  table_id: Optional[pulumi.Input[str]] = None,
                  opts: Optional[InvokeOptions] = None) -> Output[GetTableResult]
    func LookupTable(ctx *Context, args *LookupTableArgs, opts ...InvokeOption) (*LookupTableResult, error)
    func LookupTableOutput(ctx *Context, args *LookupTableOutputArgs, opts ...InvokeOption) LookupTableResultOutput

    > Note: This function is named LookupTable in the Go SDK.

    public static class GetTable 
    {
        public static Task<GetTableResult> InvokeAsync(GetTableArgs args, InvokeOptions? opts = null)
        public static Output<GetTableResult> Invoke(GetTableInvokeArgs args, InvokeOptions? opts = null)
    }
    public static CompletableFuture<GetTableResult> getTable(GetTableArgs args, InvokeOptions options)
    public static Output<GetTableResult> getTable(GetTableArgs args, InvokeOptions options)
    
    fn::invoke:
      function: gcp:bigquery/getTable:getTable
      arguments:
        # arguments dictionary

    The following arguments are supported:

    DatasetId string
    The dataset ID.
    TableId string
    The table ID.
    Project string
    The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
    DatasetId string
    The dataset ID.
    TableId string
    The table ID.
    Project string
    The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
    datasetId String
    The dataset ID.
    tableId String
    The table ID.
    project String
    The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
    datasetId string
    The dataset ID.
    tableId string
    The table ID.
    project string
    The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
    dataset_id str
    The dataset ID.
    table_id str
    The table ID.
    project str
    The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
    datasetId String
    The dataset ID.
    tableId String
    The table ID.
    project String
    The ID of the project in which the resource belongs. If it is not provided, the provider project is used.

    getTable Result

    The following output properties are available:

    BiglakeConfigurations List<GetTableBiglakeConfiguration>
    Clusterings List<string>
    CreationTime int
    DatasetId string
    DeletionProtection bool
    Description string
    EffectiveLabels Dictionary<string, string>
    EncryptionConfigurations List<GetTableEncryptionConfiguration>
    Etag string
    ExpirationTime int
    ExternalCatalogTableOptions List<GetTableExternalCatalogTableOption>
    ExternalDataConfigurations List<GetTableExternalDataConfiguration>
    FriendlyName string
    Id string
    The provider-assigned unique ID for this managed resource.
    Labels Dictionary<string, string>
    LastModifiedTime int
    Location string
    MaterializedViews List<GetTableMaterializedView>
    MaxStaleness string
    NumBytes int
    NumLongTermBytes int
    NumRows int
    PulumiLabels Dictionary<string, string>
    RangePartitionings List<GetTableRangePartitioning>
    RequirePartitionFilter bool
    ResourceTags Dictionary<string, string>
    Schema string
    SchemaForeignTypeInfos List<GetTableSchemaForeignTypeInfo>
    SelfLink string
    TableConstraints List<GetTableTableConstraint>
    TableId string
    TableMetadataView string
    TableReplicationInfos List<GetTableTableReplicationInfo>
    TimePartitionings List<GetTableTimePartitioning>
    Type string
    Views List<GetTableView>
    Project string
    BiglakeConfigurations []GetTableBiglakeConfiguration
    Clusterings []string
    CreationTime int
    DatasetId string
    DeletionProtection bool
    Description string
    EffectiveLabels map[string]string
    EncryptionConfigurations []GetTableEncryptionConfiguration
    Etag string
    ExpirationTime int
    ExternalCatalogTableOptions []GetTableExternalCatalogTableOption
    ExternalDataConfigurations []GetTableExternalDataConfiguration
    FriendlyName string
    Id string
    The provider-assigned unique ID for this managed resource.
    Labels map[string]string
    LastModifiedTime int
    Location string
    MaterializedViews []GetTableMaterializedView
    MaxStaleness string
    NumBytes int
    NumLongTermBytes int
    NumRows int
    PulumiLabels map[string]string
    RangePartitionings []GetTableRangePartitioning
    RequirePartitionFilter bool
    ResourceTags map[string]string
    Schema string
    SchemaForeignTypeInfos []GetTableSchemaForeignTypeInfo
    SelfLink string
    TableConstraints []GetTableTableConstraint
    TableId string
    TableMetadataView string
    TableReplicationInfos []GetTableTableReplicationInfo
    TimePartitionings []GetTableTimePartitioning
    Type string
    Views []GetTableView
    Project string
    biglakeConfigurations List<GetTableBiglakeConfiguration>
    clusterings List<String>
    creationTime Integer
    datasetId String
    deletionProtection Boolean
    description String
    effectiveLabels Map<String,String>
    encryptionConfigurations List<GetTableEncryptionConfiguration>
    etag String
    expirationTime Integer
    externalCatalogTableOptions List<GetTableExternalCatalogTableOption>
    externalDataConfigurations List<GetTableExternalDataConfiguration>
    friendlyName String
    id String
    The provider-assigned unique ID for this managed resource.
    labels Map<String,String>
    lastModifiedTime Integer
    location String
    materializedViews List<GetTableMaterializedView>
    maxStaleness String
    numBytes Integer
    numLongTermBytes Integer
    numRows Integer
    pulumiLabels Map<String,String>
    rangePartitionings List<GetTableRangePartitioning>
    requirePartitionFilter Boolean
    resourceTags Map<String,String>
    schema String
    schemaForeignTypeInfos List<GetTableSchemaForeignTypeInfo>
    selfLink String
    tableConstraints List<GetTableTableConstraint>
    tableId String
    tableMetadataView String
    tableReplicationInfos List<GetTableTableReplicationInfo>
    timePartitionings List<GetTableTimePartitioning>
    type String
    views List<GetTableView>
    project String
    biglakeConfigurations GetTableBiglakeConfiguration[]
    clusterings string[]
    creationTime number
    datasetId string
    deletionProtection boolean
    description string
    effectiveLabels {[key: string]: string}
    encryptionConfigurations GetTableEncryptionConfiguration[]
    etag string
    expirationTime number
    externalCatalogTableOptions GetTableExternalCatalogTableOption[]
    externalDataConfigurations GetTableExternalDataConfiguration[]
    friendlyName string
    id string
    The provider-assigned unique ID for this managed resource.
    labels {[key: string]: string}
    lastModifiedTime number
    location string
    materializedViews GetTableMaterializedView[]
    maxStaleness string
    numBytes number
    numLongTermBytes number
    numRows number
    pulumiLabels {[key: string]: string}
    rangePartitionings GetTableRangePartitioning[]
    requirePartitionFilter boolean
    resourceTags {[key: string]: string}
    schema string
    schemaForeignTypeInfos GetTableSchemaForeignTypeInfo[]
    selfLink string
    tableConstraints GetTableTableConstraint[]
    tableId string
    tableMetadataView string
    tableReplicationInfos GetTableTableReplicationInfo[]
    timePartitionings GetTableTimePartitioning[]
    type string
    views GetTableView[]
    project string
    biglake_configurations Sequence[GetTableBiglakeConfiguration]
    clusterings Sequence[str]
    creation_time int
    dataset_id str
    deletion_protection bool
    description str
    effective_labels Mapping[str, str]
    encryption_configurations Sequence[GetTableEncryptionConfiguration]
    etag str
    expiration_time int
    external_catalog_table_options Sequence[GetTableExternalCatalogTableOption]
    external_data_configurations Sequence[GetTableExternalDataConfiguration]
    friendly_name str
    id str
    The provider-assigned unique ID for this managed resource.
    labels Mapping[str, str]
    last_modified_time int
    location str
    materialized_views Sequence[GetTableMaterializedView]
    max_staleness str
    num_bytes int
    num_long_term_bytes int
    num_rows int
    pulumi_labels Mapping[str, str]
    range_partitionings Sequence[GetTableRangePartitioning]
    require_partition_filter bool
    resource_tags Mapping[str, str]
    schema str
    schema_foreign_type_infos Sequence[GetTableSchemaForeignTypeInfo]
    self_link str
    table_constraints Sequence[GetTableTableConstraint]
    table_id str
    table_metadata_view str
    table_replication_infos Sequence[GetTableTableReplicationInfo]
    time_partitionings Sequence[GetTableTimePartitioning]
    type str
    views Sequence[GetTableView]
    project str
    biglakeConfigurations List<Property Map>
    clusterings List<String>
    creationTime Number
    datasetId String
    deletionProtection Boolean
    description String
    effectiveLabels Map<String>
    encryptionConfigurations List<Property Map>
    etag String
    expirationTime Number
    externalCatalogTableOptions List<Property Map>
    externalDataConfigurations List<Property Map>
    friendlyName String
    id String
    The provider-assigned unique ID for this managed resource.
    labels Map<String>
    lastModifiedTime Number
    location String
    materializedViews List<Property Map>
    maxStaleness String
    numBytes Number
    numLongTermBytes Number
    numRows Number
    pulumiLabels Map<String>
    rangePartitionings List<Property Map>
    requirePartitionFilter Boolean
    resourceTags Map<String>
    schema String
    schemaForeignTypeInfos List<Property Map>
    selfLink String
    tableConstraints List<Property Map>
    tableId String
    tableMetadataView String
    tableReplicationInfos List<Property Map>
    timePartitionings List<Property Map>
    type String
    views List<Property Map>
    project String

    Supporting Types

    GetTableBiglakeConfiguration

    ConnectionId string
    The connection specifying the credentials to be used to read and write to external storage, such as Cloud Storage. The connection_id can have the form "<project_id>.<location_id>.<connection_id>" or "projects/<project_id>/locations/<location_id>/connections/<connection_id>".
    FileFormat string
    The file format the data is stored in.
    StorageUri string
    The fully qualified location prefix of the external folder where table data is stored. The '*' wildcard character is not allowed. The URI should be in the format "gs://bucket/path_to_table/"
    TableFormat string
    The table format the metadata only snapshots are stored in.
    ConnectionId string
    The connection specifying the credentials to be used to read and write to external storage, such as Cloud Storage. The connection_id can have the form "<project_id>.<location_id>.<connection_id>" or "projects/<project_id>/locations/<location_id>/connections/<connection_id>".
    FileFormat string
    The file format the data is stored in.
    StorageUri string
    The fully qualified location prefix of the external folder where table data is stored. The '*' wildcard character is not allowed. The URI should be in the format "gs://bucket/path_to_table/"
    TableFormat string
    The table format the metadata only snapshots are stored in.
    connectionId String
    The connection specifying the credentials to be used to read and write to external storage, such as Cloud Storage. The connection_id can have the form "<project_id>.<location_id>.<connection_id>" or "projects/<project_id>/locations/<location_id>/connections/<connection_id>".
    fileFormat String
    The file format the data is stored in.
    storageUri String
    The fully qualified location prefix of the external folder where table data is stored. The '*' wildcard character is not allowed. The URI should be in the format "gs://bucket/path_to_table/"
    tableFormat String
    The table format the metadata only snapshots are stored in.
    connectionId string
    The connection specifying the credentials to be used to read and write to external storage, such as Cloud Storage. The connection_id can have the form "<project_id>.<location_id>.<connection_id>" or "projects/<project_id>/locations/<location_id>/connections/<connection_id>".
    fileFormat string
    The file format the data is stored in.
    storageUri string
    The fully qualified location prefix of the external folder where table data is stored. The '*' wildcard character is not allowed. The URI should be in the format "gs://bucket/path_to_table/"
    tableFormat string
    The table format the metadata only snapshots are stored in.
    connection_id str
    The connection specifying the credentials to be used to read and write to external storage, such as Cloud Storage. The connection_id can have the form "<project_id>.<location_id>.<connection_id>" or "projects/<project_id>/locations/<location_id>/connections/<connection_id>".
    file_format str
    The file format the data is stored in.
    storage_uri str
    The fully qualified location prefix of the external folder where table data is stored. The '*' wildcard character is not allowed. The URI should be in the format "gs://bucket/path_to_table/"
    table_format str
    The table format the metadata only snapshots are stored in.
    connectionId String
    The connection specifying the credentials to be used to read and write to external storage, such as Cloud Storage. The connection_id can have the form "<project_id>.<location_id>.<connection_id>" or "projects/<project_id>/locations/<location_id>/connections/<connection_id>".
    fileFormat String
    The file format the data is stored in.
    storageUri String
    The fully qualified location prefix of the external folder where table data is stored. The '*' wildcard character is not allowed. The URI should be in the format "gs://bucket/path_to_table/"
    tableFormat String
    The table format the metadata only snapshots are stored in.

    GetTableEncryptionConfiguration

    KmsKeyName string
    The self link or full name of a key which should be used to encrypt this table. Note that the default bigquery service account will need to have encrypt/decrypt permissions on this key - you may want to see the gcp.bigquery.getDefaultServiceAccount datasource and the gcp.kms.CryptoKeyIAMBinding resource.
    KmsKeyVersion string
    The self link or full name of the kms key version used to encrypt this table.
    KmsKeyName string
    The self link or full name of a key which should be used to encrypt this table. Note that the default bigquery service account will need to have encrypt/decrypt permissions on this key - you may want to see the gcp.bigquery.getDefaultServiceAccount datasource and the gcp.kms.CryptoKeyIAMBinding resource.
    KmsKeyVersion string
    The self link or full name of the kms key version used to encrypt this table.
    kmsKeyName String
    The self link or full name of a key which should be used to encrypt this table. Note that the default bigquery service account will need to have encrypt/decrypt permissions on this key - you may want to see the gcp.bigquery.getDefaultServiceAccount datasource and the gcp.kms.CryptoKeyIAMBinding resource.
    kmsKeyVersion String
    The self link or full name of the kms key version used to encrypt this table.
    kmsKeyName string
    The self link or full name of a key which should be used to encrypt this table. Note that the default bigquery service account will need to have encrypt/decrypt permissions on this key - you may want to see the gcp.bigquery.getDefaultServiceAccount datasource and the gcp.kms.CryptoKeyIAMBinding resource.
    kmsKeyVersion string
    The self link or full name of the kms key version used to encrypt this table.
    kms_key_name str
    The self link or full name of a key which should be used to encrypt this table. Note that the default bigquery service account will need to have encrypt/decrypt permissions on this key - you may want to see the gcp.bigquery.getDefaultServiceAccount datasource and the gcp.kms.CryptoKeyIAMBinding resource.
    kms_key_version str
    The self link or full name of the kms key version used to encrypt this table.
    kmsKeyName String
    The self link or full name of a key which should be used to encrypt this table. Note that the default bigquery service account will need to have encrypt/decrypt permissions on this key - you may want to see the gcp.bigquery.getDefaultServiceAccount datasource and the gcp.kms.CryptoKeyIAMBinding resource.
    kmsKeyVersion String
    The self link or full name of the kms key version used to encrypt this table.

    GetTableExternalCatalogTableOption

    ConnectionId string
    The connection specifying the credentials to be used to read external storage, such as Azure Blob, Cloud Storage, or S3. The connection is needed to read the open source table from BigQuery Engine. The connection_id can have the form <project_id>.<location_id>.<connection_id> or projects/<project_id>/locations/<location_id>/connections/<connection_id>.
    Parameters Dictionary<string, string>
    A map of key value pairs defining the parameters and properties of the open source table. Corresponds with hive meta store table parameters. Maximum size of 4 MiB.
    StorageDescriptors List<GetTableExternalCatalogTableOptionStorageDescriptor>
    A storage descriptor containing information about the physical storage of this table.
    ConnectionId string
    The connection specifying the credentials to be used to read external storage, such as Azure Blob, Cloud Storage, or S3. The connection is needed to read the open source table from BigQuery Engine. The connection_id can have the form <project_id>.<location_id>.<connection_id> or projects/<project_id>/locations/<location_id>/connections/<connection_id>.
    Parameters map[string]string
    A map of key value pairs defining the parameters and properties of the open source table. Corresponds with hive meta store table parameters. Maximum size of 4 MiB.
    StorageDescriptors []GetTableExternalCatalogTableOptionStorageDescriptor
    A storage descriptor containing information about the physical storage of this table.
    connectionId String
    The connection specifying the credentials to be used to read external storage, such as Azure Blob, Cloud Storage, or S3. The connection is needed to read the open source table from BigQuery Engine. The connection_id can have the form <project_id>.<location_id>.<connection_id> or projects/<project_id>/locations/<location_id>/connections/<connection_id>.
    parameters Map<String,String>
    A map of key value pairs defining the parameters and properties of the open source table. Corresponds with hive meta store table parameters. Maximum size of 4 MiB.
    storageDescriptors List<GetTableExternalCatalogTableOptionStorageDescriptor>
    A storage descriptor containing information about the physical storage of this table.
    connectionId string
    The connection specifying the credentials to be used to read external storage, such as Azure Blob, Cloud Storage, or S3. The connection is needed to read the open source table from BigQuery Engine. The connection_id can have the form <project_id>.<location_id>.<connection_id> or projects/<project_id>/locations/<location_id>/connections/<connection_id>.
    parameters {[key: string]: string}
    A map of key value pairs defining the parameters and properties of the open source table. Corresponds with hive meta store table parameters. Maximum size of 4 MiB.
    storageDescriptors GetTableExternalCatalogTableOptionStorageDescriptor[]
    A storage descriptor containing information about the physical storage of this table.
    connection_id str
    The connection specifying the credentials to be used to read external storage, such as Azure Blob, Cloud Storage, or S3. The connection is needed to read the open source table from BigQuery Engine. The connection_id can have the form <project_id>.<location_id>.<connection_id> or projects/<project_id>/locations/<location_id>/connections/<connection_id>.
    parameters Mapping[str, str]
    A map of key value pairs defining the parameters and properties of the open source table. Corresponds with hive meta store table parameters. Maximum size of 4 MiB.
    storage_descriptors Sequence[GetTableExternalCatalogTableOptionStorageDescriptor]
    A storage descriptor containing information about the physical storage of this table.
    connectionId String
    The connection specifying the credentials to be used to read external storage, such as Azure Blob, Cloud Storage, or S3. The connection is needed to read the open source table from BigQuery Engine. The connection_id can have the form <project_id>.<location_id>.<connection_id> or projects/<project_id>/locations/<location_id>/connections/<connection_id>.
    parameters Map<String>
    A map of key value pairs defining the parameters and properties of the open source table. Corresponds with hive meta store table parameters. Maximum size of 4 MiB.
    storageDescriptors List<Property Map>
    A storage descriptor containing information about the physical storage of this table.

    GetTableExternalCatalogTableOptionStorageDescriptor

    InputFormat string
    Specifies the fully qualified class name of the InputFormat (e.g. "org.apache.hadoop.hive.ql.io.orc.OrcInputFormat"). The maximum length is 128 characters.
    LocationUri string
    The physical location of the table (e.g. 'gs://spark-dataproc-data/pangea-data/case_sensitive/' or 'gs://spark-dataproc-data/pangea-data/*'). The maximum length is 2056 bytes.
    OutputFormat string
    Specifies the fully qualified class name of the OutputFormat (e.g. "org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat"). The maximum length is 128 characters.
    SerdeInfos List<GetTableExternalCatalogTableOptionStorageDescriptorSerdeInfo>
    Serializer and deserializer information.
    InputFormat string
    Specifies the fully qualified class name of the InputFormat (e.g. "org.apache.hadoop.hive.ql.io.orc.OrcInputFormat"). The maximum length is 128 characters.
    LocationUri string
    The physical location of the table (e.g. 'gs://spark-dataproc-data/pangea-data/case_sensitive/' or 'gs://spark-dataproc-data/pangea-data/*'). The maximum length is 2056 bytes.
    OutputFormat string
    Specifies the fully qualified class name of the OutputFormat (e.g. "org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat"). The maximum length is 128 characters.
    SerdeInfos []GetTableExternalCatalogTableOptionStorageDescriptorSerdeInfo
    Serializer and deserializer information.
    inputFormat String
    Specifies the fully qualified class name of the InputFormat (e.g. "org.apache.hadoop.hive.ql.io.orc.OrcInputFormat"). The maximum length is 128 characters.
    locationUri String
    The physical location of the table (e.g. 'gs://spark-dataproc-data/pangea-data/case_sensitive/' or 'gs://spark-dataproc-data/pangea-data/*'). The maximum length is 2056 bytes.
    outputFormat String
    Specifies the fully qualified class name of the OutputFormat (e.g. "org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat"). The maximum length is 128 characters.
    serdeInfos List<GetTableExternalCatalogTableOptionStorageDescriptorSerdeInfo>
    Serializer and deserializer information.
    inputFormat string
    Specifies the fully qualified class name of the InputFormat (e.g. "org.apache.hadoop.hive.ql.io.orc.OrcInputFormat"). The maximum length is 128 characters.
    locationUri string
    The physical location of the table (e.g. 'gs://spark-dataproc-data/pangea-data/case_sensitive/' or 'gs://spark-dataproc-data/pangea-data/*'). The maximum length is 2056 bytes.
    outputFormat string
    Specifies the fully qualified class name of the OutputFormat (e.g. "org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat"). The maximum length is 128 characters.
    serdeInfos GetTableExternalCatalogTableOptionStorageDescriptorSerdeInfo[]
    Serializer and deserializer information.
    input_format str
    Specifies the fully qualified class name of the InputFormat (e.g. "org.apache.hadoop.hive.ql.io.orc.OrcInputFormat"). The maximum length is 128 characters.
    location_uri str
    The physical location of the table (e.g. 'gs://spark-dataproc-data/pangea-data/case_sensitive/' or 'gs://spark-dataproc-data/pangea-data/*'). The maximum length is 2056 bytes.
    output_format str
    Specifies the fully qualified class name of the OutputFormat (e.g. "org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat"). The maximum length is 128 characters.
    serde_infos Sequence[GetTableExternalCatalogTableOptionStorageDescriptorSerdeInfo]
    Serializer and deserializer information.
    inputFormat String
    Specifies the fully qualified class name of the InputFormat (e.g. "org.apache.hadoop.hive.ql.io.orc.OrcInputFormat"). The maximum length is 128 characters.
    locationUri String
    The physical location of the table (e.g. 'gs://spark-dataproc-data/pangea-data/case_sensitive/' or 'gs://spark-dataproc-data/pangea-data/*'). The maximum length is 2056 bytes.
    outputFormat String
    Specifies the fully qualified class name of the OutputFormat (e.g. "org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat"). The maximum length is 128 characters.
    serdeInfos List<Property Map>
    Serializer and deserializer information.

    GetTableExternalCatalogTableOptionStorageDescriptorSerdeInfo

    Name string
    Name of the SerDe. The maximum length is 256 characters.
    Parameters Dictionary<string, string>
    Key-value pairs that define the initialization parameters for the serialization library. Maximum size 10 KiB.
    SerializationLibrary string
    Specifies a fully-qualified class name of the serialization library that is responsible for the translation of data between table representation and the underlying low-level input and output format structures. The maximum length is 256 characters.
    Name string
    Name of the SerDe. The maximum length is 256 characters.
    Parameters map[string]string
    Key-value pairs that define the initialization parameters for the serialization library. Maximum size 10 KiB.
    SerializationLibrary string
    Specifies a fully-qualified class name of the serialization library that is responsible for the translation of data between table representation and the underlying low-level input and output format structures. The maximum length is 256 characters.
    name String
    Name of the SerDe. The maximum length is 256 characters.
    parameters Map<String,String>
    Key-value pairs that define the initialization parameters for the serialization library. Maximum size 10 KiB.
    serializationLibrary String
    Specifies a fully-qualified class name of the serialization library that is responsible for the translation of data between table representation and the underlying low-level input and output format structures. The maximum length is 256 characters.
    name string
    Name of the SerDe. The maximum length is 256 characters.
    parameters {[key: string]: string}
    Key-value pairs that define the initialization parameters for the serialization library. Maximum size 10 KiB.
    serializationLibrary string
    Specifies a fully-qualified class name of the serialization library that is responsible for the translation of data between table representation and the underlying low-level input and output format structures. The maximum length is 256 characters.
    name str
    Name of the SerDe. The maximum length is 256 characters.
    parameters Mapping[str, str]
    Key-value pairs that define the initialization parameters for the serialization library. Maximum size 10 KiB.
    serialization_library str
    Specifies a fully-qualified class name of the serialization library that is responsible for the translation of data between table representation and the underlying low-level input and output format structures. The maximum length is 256 characters.
    name String
    Name of the SerDe. The maximum length is 256 characters.
    parameters Map<String>
    Key-value pairs that define the initialization parameters for the serialization library. Maximum size 10 KiB.
    serializationLibrary String
    Specifies a fully-qualified class name of the serialization library that is responsible for the translation of data between table representation and the underlying low-level input and output format structures. The maximum length is 256 characters.

    GetTableExternalDataConfiguration

    Autodetect bool
    Let BigQuery try to autodetect the schema and format of the table.
    AvroOptions List<GetTableExternalDataConfigurationAvroOption>
    Additional options if source_format is set to "AVRO"
    BigtableOptions List<GetTableExternalDataConfigurationBigtableOption>
    Additional options if sourceFormat is set to BIGTABLE.
    Compression string
    The compression type of the data source. Valid values are "NONE" or "GZIP".
    ConnectionId string
    The connection specifying the credentials to be used to read external storage, such as Azure Blob, Cloud Storage, or S3. The connectionId can have the form "&lt;project_id&gt;.&lt;location_id&gt;.&lt;connection_id&gt;" or "projects/&lt;project_id&gt;/locations/&lt;location_id&gt;/connections/&lt;connection_id&gt;".
    CsvOptions List<GetTableExternalDataConfigurationCsvOption>
    Additional properties to set if source_format is set to "CSV".
    FileSetSpecType string
    Specifies how source URIs are interpreted for constructing the file set to load. By default source URIs are expanded against the underlying storage. Other options include specifying manifest files. Only applicable to object storage systems.
    GoogleSheetsOptions List<GetTableExternalDataConfigurationGoogleSheetsOption>
    Additional options if source_format is set to "GOOGLE_SHEETS".
    HivePartitioningOptions List<GetTableExternalDataConfigurationHivePartitioningOption>
    When set, configures hive partitioning support. Not all storage formats support hive partitioning -- requesting hive partitioning on an unsupported format will lead to an error, as will providing an invalid specification.
    IgnoreUnknownValues bool
    Indicates if BigQuery should allow extra values that are not represented in the table schema. If true, the extra values are ignored. If false, records with extra columns are treated as bad records, and if there are too many bad records, an invalid error is returned in the job result. The default value is false.
    JsonExtension string
    Load option to be used together with sourceFormat newline-delimited JSON to indicate that a variant of JSON is being loaded. To load newline-delimited GeoJSON, specify GEOJSON (and sourceFormat must be set to NEWLINE_DELIMITED_JSON).
    JsonOptions List<GetTableExternalDataConfigurationJsonOption>
    Additional properties to set if sourceFormat is set to JSON.
    MaxBadRecords int
    The maximum number of bad records that BigQuery can ignore when reading data.
    MetadataCacheMode string
    Metadata Cache Mode for the table. Set this to enable caching of metadata from external data source.
    ObjectMetadata string
    Object Metadata is used to create Object Tables. Object Tables contain a listing of objects (with their metadata) found at the sourceUris. If ObjectMetadata is set, sourceFormat should be omitted.
    ParquetOptions List<GetTableExternalDataConfigurationParquetOption>
    Additional properties to set if sourceFormat is set to PARQUET.
    ReferenceFileSchemaUri string
    When creating an external table, the user can provide a reference file with the table schema. This is enabled for the following formats: AVRO, PARQUET, ORC.
    Schema string
    A JSON schema for the external table. Schema is required for CSV and JSON formats and is disallowed for Google Cloud Bigtable, Cloud Datastore backups, and Avro formats when using external tables.
    SourceFormat string
    Please see sourceFormat under ExternalDataConfiguration in BigQuery's public API documentation (https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#externaldataconfiguration) for supported formats. To use "GOOGLE_SHEETS" the scopes must include "https://www.googleapis.com/auth/drive.readonly".
    SourceUris List<string>
    A list of the fully-qualified URIs that point to your data in Google Cloud.
    Autodetect bool
    Let BigQuery try to autodetect the schema and format of the table.
    AvroOptions []GetTableExternalDataConfigurationAvroOption
    Additional options if source_format is set to "AVRO"
    BigtableOptions []GetTableExternalDataConfigurationBigtableOption
    Additional options if sourceFormat is set to BIGTABLE.
    Compression string
    The compression type of the data source. Valid values are "NONE" or "GZIP".
    ConnectionId string
    The connection specifying the credentials to be used to read external storage, such as Azure Blob, Cloud Storage, or S3. The connectionId can have the form "&lt;project_id&gt;.&lt;location_id&gt;.&lt;connection_id&gt;" or "projects/&lt;project_id&gt;/locations/&lt;location_id&gt;/connections/&lt;connection_id&gt;".
    CsvOptions []GetTableExternalDataConfigurationCsvOption
    Additional properties to set if source_format is set to "CSV".
    FileSetSpecType string
    Specifies how source URIs are interpreted for constructing the file set to load. By default source URIs are expanded against the underlying storage. Other options include specifying manifest files. Only applicable to object storage systems.
    GoogleSheetsOptions []GetTableExternalDataConfigurationGoogleSheetsOption
    Additional options if source_format is set to "GOOGLE_SHEETS".
    HivePartitioningOptions []GetTableExternalDataConfigurationHivePartitioningOption
    When set, configures hive partitioning support. Not all storage formats support hive partitioning -- requesting hive partitioning on an unsupported format will lead to an error, as will providing an invalid specification.
    IgnoreUnknownValues bool
    Indicates if BigQuery should allow extra values that are not represented in the table schema. If true, the extra values are ignored. If false, records with extra columns are treated as bad records, and if there are too many bad records, an invalid error is returned in the job result. The default value is false.
    JsonExtension string
    Load option to be used together with sourceFormat newline-delimited JSON to indicate that a variant of JSON is being loaded. To load newline-delimited GeoJSON, specify GEOJSON (and sourceFormat must be set to NEWLINE_DELIMITED_JSON).
    JsonOptions []GetTableExternalDataConfigurationJsonOption
    Additional properties to set if sourceFormat is set to JSON.
    MaxBadRecords int
    The maximum number of bad records that BigQuery can ignore when reading data.
    MetadataCacheMode string
    Metadata Cache Mode for the table. Set this to enable caching of metadata from external data source.
    ObjectMetadata string
    Object Metadata is used to create Object Tables. Object Tables contain a listing of objects (with their metadata) found at the sourceUris. If ObjectMetadata is set, sourceFormat should be omitted.
    ParquetOptions []GetTableExternalDataConfigurationParquetOption
    Additional properties to set if sourceFormat is set to PARQUET.
    ReferenceFileSchemaUri string
    When creating an external table, the user can provide a reference file with the table schema. This is enabled for the following formats: AVRO, PARQUET, ORC.
    Schema string
    A JSON schema for the external table. Schema is required for CSV and JSON formats and is disallowed for Google Cloud Bigtable, Cloud Datastore backups, and Avro formats when using external tables.
    SourceFormat string
    Please see sourceFormat under ExternalDataConfiguration in BigQuery's public API documentation (https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#externaldataconfiguration) for supported formats. To use "GOOGLE_SHEETS" the scopes must include "https://www.googleapis.com/auth/drive.readonly".
    SourceUris []string
    A list of the fully-qualified URIs that point to your data in Google Cloud.
    autodetect Boolean
    Let BigQuery try to autodetect the schema and format of the table.
    avroOptions List<GetTableExternalDataConfigurationAvroOption>
    Additional options if source_format is set to "AVRO"
    bigtableOptions List<GetTableExternalDataConfigurationBigtableOption>
    Additional options if sourceFormat is set to BIGTABLE.
    compression String
    The compression type of the data source. Valid values are "NONE" or "GZIP".
    connectionId String
    The connection specifying the credentials to be used to read external storage, such as Azure Blob, Cloud Storage, or S3. The connectionId can have the form "&lt;project_id&gt;.&lt;location_id&gt;.&lt;connection_id&gt;" or "projects/&lt;project_id&gt;/locations/&lt;location_id&gt;/connections/&lt;connection_id&gt;".
    csvOptions List<GetTableExternalDataConfigurationCsvOption>
    Additional properties to set if source_format is set to "CSV".
    fileSetSpecType String
    Specifies how source URIs are interpreted for constructing the file set to load. By default source URIs are expanded against the underlying storage. Other options include specifying manifest files. Only applicable to object storage systems.
    googleSheetsOptions List<GetTableExternalDataConfigurationGoogleSheetsOption>
    Additional options if source_format is set to "GOOGLE_SHEETS".
    hivePartitioningOptions List<GetTableExternalDataConfigurationHivePartitioningOption>
    When set, configures hive partitioning support. Not all storage formats support hive partitioning -- requesting hive partitioning on an unsupported format will lead to an error, as will providing an invalid specification.
    ignoreUnknownValues Boolean
    Indicates if BigQuery should allow extra values that are not represented in the table schema. If true, the extra values are ignored. If false, records with extra columns are treated as bad records, and if there are too many bad records, an invalid error is returned in the job result. The default value is false.
    jsonExtension String
    Load option to be used together with sourceFormat newline-delimited JSON to indicate that a variant of JSON is being loaded. To load newline-delimited GeoJSON, specify GEOJSON (and sourceFormat must be set to NEWLINE_DELIMITED_JSON).
    jsonOptions List<GetTableExternalDataConfigurationJsonOption>
    Additional properties to set if sourceFormat is set to JSON.
    maxBadRecords Integer
    The maximum number of bad records that BigQuery can ignore when reading data.
    metadataCacheMode String
    Metadata Cache Mode for the table. Set this to enable caching of metadata from external data source.
    objectMetadata String
    Object Metadata is used to create Object Tables. Object Tables contain a listing of objects (with their metadata) found at the sourceUris. If ObjectMetadata is set, sourceFormat should be omitted.
    parquetOptions List<GetTableExternalDataConfigurationParquetOption>
    Additional properties to set if sourceFormat is set to PARQUET.
    referenceFileSchemaUri String
    When creating an external table, the user can provide a reference file with the table schema. This is enabled for the following formats: AVRO, PARQUET, ORC.
    schema String
    A JSON schema for the external table. Schema is required for CSV and JSON formats and is disallowed for Google Cloud Bigtable, Cloud Datastore backups, and Avro formats when using external tables.
    sourceFormat String
    Please see sourceFormat under ExternalDataConfiguration in BigQuery's public API documentation (https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#externaldataconfiguration) for supported formats. To use "GOOGLE_SHEETS" the scopes must include "https://www.googleapis.com/auth/drive.readonly".
    sourceUris List<String>
    A list of the fully-qualified URIs that point to your data in Google Cloud.
    autodetect boolean
    Let BigQuery try to autodetect the schema and format of the table.
    avroOptions GetTableExternalDataConfigurationAvroOption[]
    Additional options if source_format is set to "AVRO"
    bigtableOptions GetTableExternalDataConfigurationBigtableOption[]
    Additional options if sourceFormat is set to BIGTABLE.
    compression string
    The compression type of the data source. Valid values are "NONE" or "GZIP".
    connectionId string
    The connection specifying the credentials to be used to read external storage, such as Azure Blob, Cloud Storage, or S3. The connectionId can have the form "&lt;project_id&gt;.&lt;location_id&gt;.&lt;connection_id&gt;" or "projects/&lt;project_id&gt;/locations/&lt;location_id&gt;/connections/&lt;connection_id&gt;".
    csvOptions GetTableExternalDataConfigurationCsvOption[]
    Additional properties to set if source_format is set to "CSV".
    fileSetSpecType string
    Specifies how source URIs are interpreted for constructing the file set to load. By default source URIs are expanded against the underlying storage. Other options include specifying manifest files. Only applicable to object storage systems.
    googleSheetsOptions GetTableExternalDataConfigurationGoogleSheetsOption[]
    Additional options if source_format is set to "GOOGLE_SHEETS".
    hivePartitioningOptions GetTableExternalDataConfigurationHivePartitioningOption[]
    When set, configures hive partitioning support. Not all storage formats support hive partitioning -- requesting hive partitioning on an unsupported format will lead to an error, as will providing an invalid specification.
    ignoreUnknownValues boolean
    Indicates if BigQuery should allow extra values that are not represented in the table schema. If true, the extra values are ignored. If false, records with extra columns are treated as bad records, and if there are too many bad records, an invalid error is returned in the job result. The default value is false.
    jsonExtension string
    Load option to be used together with sourceFormat newline-delimited JSON to indicate that a variant of JSON is being loaded. To load newline-delimited GeoJSON, specify GEOJSON (and sourceFormat must be set to NEWLINE_DELIMITED_JSON).
    jsonOptions GetTableExternalDataConfigurationJsonOption[]
    Additional properties to set if sourceFormat is set to JSON.
    maxBadRecords number
    The maximum number of bad records that BigQuery can ignore when reading data.
    metadataCacheMode string
    Metadata Cache Mode for the table. Set this to enable caching of metadata from external data source.
    objectMetadata string
    Object Metadata is used to create Object Tables. Object Tables contain a listing of objects (with their metadata) found at the sourceUris. If ObjectMetadata is set, sourceFormat should be omitted.
    parquetOptions GetTableExternalDataConfigurationParquetOption[]
    Additional properties to set if sourceFormat is set to PARQUET.
    referenceFileSchemaUri string
    When creating an external table, the user can provide a reference file with the table schema. This is enabled for the following formats: AVRO, PARQUET, ORC.
    schema string
    A JSON schema for the external table. Schema is required for CSV and JSON formats and is disallowed for Google Cloud Bigtable, Cloud Datastore backups, and Avro formats when using external tables.
    sourceFormat string
    Please see sourceFormat under ExternalDataConfiguration in BigQuery's public API documentation (https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#externaldataconfiguration) for supported formats. To use "GOOGLE_SHEETS" the scopes must include "https://www.googleapis.com/auth/drive.readonly".
    sourceUris string[]
    A list of the fully-qualified URIs that point to your data in Google Cloud.
    autodetect bool
    Let BigQuery try to autodetect the schema and format of the table.
    avro_options Sequence[GetTableExternalDataConfigurationAvroOption]
    Additional options if source_format is set to "AVRO"
    bigtable_options Sequence[GetTableExternalDataConfigurationBigtableOption]
    Additional options if sourceFormat is set to BIGTABLE.
    compression str
    The compression type of the data source. Valid values are "NONE" or "GZIP".
    connection_id str
    The connection specifying the credentials to be used to read external storage, such as Azure Blob, Cloud Storage, or S3. The connectionId can have the form "&lt;project_id&gt;.&lt;location_id&gt;.&lt;connection_id&gt;" or "projects/&lt;project_id&gt;/locations/&lt;location_id&gt;/connections/&lt;connection_id&gt;".
    csv_options Sequence[GetTableExternalDataConfigurationCsvOption]
    Additional properties to set if source_format is set to "CSV".
    file_set_spec_type str
    Specifies how source URIs are interpreted for constructing the file set to load. By default source URIs are expanded against the underlying storage. Other options include specifying manifest files. Only applicable to object storage systems.
    google_sheets_options Sequence[GetTableExternalDataConfigurationGoogleSheetsOption]
    Additional options if source_format is set to "GOOGLE_SHEETS".
    hive_partitioning_options Sequence[GetTableExternalDataConfigurationHivePartitioningOption]
    When set, configures hive partitioning support. Not all storage formats support hive partitioning -- requesting hive partitioning on an unsupported format will lead to an error, as will providing an invalid specification.
    ignore_unknown_values bool
    Indicates if BigQuery should allow extra values that are not represented in the table schema. If true, the extra values are ignored. If false, records with extra columns are treated as bad records, and if there are too many bad records, an invalid error is returned in the job result. The default value is false.
    json_extension str
    Load option to be used together with sourceFormat newline-delimited JSON to indicate that a variant of JSON is being loaded. To load newline-delimited GeoJSON, specify GEOJSON (and sourceFormat must be set to NEWLINE_DELIMITED_JSON).
    json_options Sequence[GetTableExternalDataConfigurationJsonOption]
    Additional properties to set if sourceFormat is set to JSON.
    max_bad_records int
    The maximum number of bad records that BigQuery can ignore when reading data.
    metadata_cache_mode str
    Metadata Cache Mode for the table. Set this to enable caching of metadata from external data source.
    object_metadata str
    Object Metadata is used to create Object Tables. Object Tables contain a listing of objects (with their metadata) found at the sourceUris. If ObjectMetadata is set, sourceFormat should be omitted.
    parquet_options Sequence[GetTableExternalDataConfigurationParquetOption]
    Additional properties to set if sourceFormat is set to PARQUET.
    reference_file_schema_uri str
    When creating an external table, the user can provide a reference file with the table schema. This is enabled for the following formats: AVRO, PARQUET, ORC.
    schema str
    A JSON schema for the external table. Schema is required for CSV and JSON formats and is disallowed for Google Cloud Bigtable, Cloud Datastore backups, and Avro formats when using external tables.
    source_format str
    Please see sourceFormat under ExternalDataConfiguration in BigQuery's public API documentation (https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#externaldataconfiguration) for supported formats. To use "GOOGLE_SHEETS" the scopes must include "https://www.googleapis.com/auth/drive.readonly".
    source_uris Sequence[str]
    A list of the fully-qualified URIs that point to your data in Google Cloud.
    autodetect Boolean
    Let BigQuery try to autodetect the schema and format of the table.
    avroOptions List<Property Map>
    Additional options if source_format is set to "AVRO"
    bigtableOptions List<Property Map>
    Additional options if sourceFormat is set to BIGTABLE.
    compression String
    The compression type of the data source. Valid values are "NONE" or "GZIP".
    connectionId String
    The connection specifying the credentials to be used to read external storage, such as Azure Blob, Cloud Storage, or S3. The connectionId can have the form "&lt;project_id&gt;.&lt;location_id&gt;.&lt;connection_id&gt;" or "projects/&lt;project_id&gt;/locations/&lt;location_id&gt;/connections/&lt;connection_id&gt;".
    csvOptions List<Property Map>
    Additional properties to set if source_format is set to "CSV".
    fileSetSpecType String
    Specifies how source URIs are interpreted for constructing the file set to load. By default source URIs are expanded against the underlying storage. Other options include specifying manifest files. Only applicable to object storage systems.
    googleSheetsOptions List<Property Map>
    Additional options if source_format is set to "GOOGLE_SHEETS".
    hivePartitioningOptions List<Property Map>
    When set, configures hive partitioning support. Not all storage formats support hive partitioning -- requesting hive partitioning on an unsupported format will lead to an error, as will providing an invalid specification.
    ignoreUnknownValues Boolean
    Indicates if BigQuery should allow extra values that are not represented in the table schema. If true, the extra values are ignored. If false, records with extra columns are treated as bad records, and if there are too many bad records, an invalid error is returned in the job result. The default value is false.
    jsonExtension String
    Load option to be used together with sourceFormat newline-delimited JSON to indicate that a variant of JSON is being loaded. To load newline-delimited GeoJSON, specify GEOJSON (and sourceFormat must be set to NEWLINE_DELIMITED_JSON).
    jsonOptions List<Property Map>
    Additional properties to set if sourceFormat is set to JSON.
    maxBadRecords Number
    The maximum number of bad records that BigQuery can ignore when reading data.
    metadataCacheMode String
    Metadata Cache Mode for the table. Set this to enable caching of metadata from external data source.
    objectMetadata String
    Object Metadata is used to create Object Tables. Object Tables contain a listing of objects (with their metadata) found at the sourceUris. If ObjectMetadata is set, sourceFormat should be omitted.
    parquetOptions List<Property Map>
    Additional properties to set if sourceFormat is set to PARQUET.
    referenceFileSchemaUri String
    When creating an external table, the user can provide a reference file with the table schema. This is enabled for the following formats: AVRO, PARQUET, ORC.
    schema String
    A JSON schema for the external table. Schema is required for CSV and JSON formats and is disallowed for Google Cloud Bigtable, Cloud Datastore backups, and Avro formats when using external tables.
    sourceFormat String
    Please see sourceFormat under ExternalDataConfiguration in BigQuery's public API documentation (https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#externaldataconfiguration) for supported formats. To use "GOOGLE_SHEETS" the scopes must include "https://www.googleapis.com/auth/drive.readonly".
    sourceUris List<String>
    A list of the fully-qualified URIs that point to your data in Google Cloud.

    GetTableExternalDataConfigurationAvroOption

    UseAvroLogicalTypes bool
    If sourceFormat is set to "AVRO", indicates whether to interpret logical types as the corresponding BigQuery data type (for example, TIMESTAMP), instead of using the raw type (for example, INTEGER).
    UseAvroLogicalTypes bool
    If sourceFormat is set to "AVRO", indicates whether to interpret logical types as the corresponding BigQuery data type (for example, TIMESTAMP), instead of using the raw type (for example, INTEGER).
    useAvroLogicalTypes Boolean
    If sourceFormat is set to "AVRO", indicates whether to interpret logical types as the corresponding BigQuery data type (for example, TIMESTAMP), instead of using the raw type (for example, INTEGER).
    useAvroLogicalTypes boolean
    If sourceFormat is set to "AVRO", indicates whether to interpret logical types as the corresponding BigQuery data type (for example, TIMESTAMP), instead of using the raw type (for example, INTEGER).
    use_avro_logical_types bool
    If sourceFormat is set to "AVRO", indicates whether to interpret logical types as the corresponding BigQuery data type (for example, TIMESTAMP), instead of using the raw type (for example, INTEGER).
    useAvroLogicalTypes Boolean
    If sourceFormat is set to "AVRO", indicates whether to interpret logical types as the corresponding BigQuery data type (for example, TIMESTAMP), instead of using the raw type (for example, INTEGER).

    GetTableExternalDataConfigurationBigtableOption

    ColumnFamilies List<GetTableExternalDataConfigurationBigtableOptionColumnFamily>
    A list of column families to expose in the table schema along with their types. This list restricts the column families that can be referenced in queries and specifies their value types. You can use this list to do type conversions - see the 'type' field for more details. If you leave this list empty, all column families are present in the table schema and their values are read as BYTES. During a query only the column families referenced in that query are read from Bigtable.
    IgnoreUnspecifiedColumnFamilies bool
    If field is true, then the column families that are not specified in columnFamilies list are not exposed in the table schema. Otherwise, they are read with BYTES type values. The default value is false.
    OutputColumnFamiliesAsJson bool
    If field is true, then each column family will be read as a single JSON column. Otherwise they are read as a repeated cell structure containing timestamp/value tuples. The default value is false.
    ReadRowkeyAsString bool
    If field is true, then the rowkey column families will be read and converted to string. Otherwise they are read with BYTES type values and users need to manually cast them with CAST if necessary. The default value is false.
    ColumnFamilies []GetTableExternalDataConfigurationBigtableOptionColumnFamily
    A list of column families to expose in the table schema along with their types. This list restricts the column families that can be referenced in queries and specifies their value types. You can use this list to do type conversions - see the 'type' field for more details. If you leave this list empty, all column families are present in the table schema and their values are read as BYTES. During a query only the column families referenced in that query are read from Bigtable.
    IgnoreUnspecifiedColumnFamilies bool
    If field is true, then the column families that are not specified in columnFamilies list are not exposed in the table schema. Otherwise, they are read with BYTES type values. The default value is false.
    OutputColumnFamiliesAsJson bool
    If field is true, then each column family will be read as a single JSON column. Otherwise they are read as a repeated cell structure containing timestamp/value tuples. The default value is false.
    ReadRowkeyAsString bool
    If field is true, then the rowkey column families will be read and converted to string. Otherwise they are read with BYTES type values and users need to manually cast them with CAST if necessary. The default value is false.
    columnFamilies List<GetTableExternalDataConfigurationBigtableOptionColumnFamily>
    A list of column families to expose in the table schema along with their types. This list restricts the column families that can be referenced in queries and specifies their value types. You can use this list to do type conversions - see the 'type' field for more details. If you leave this list empty, all column families are present in the table schema and their values are read as BYTES. During a query only the column families referenced in that query are read from Bigtable.
    ignoreUnspecifiedColumnFamilies Boolean
    If field is true, then the column families that are not specified in columnFamilies list are not exposed in the table schema. Otherwise, they are read with BYTES type values. The default value is false.
    outputColumnFamiliesAsJson Boolean
    If field is true, then each column family will be read as a single JSON column. Otherwise they are read as a repeated cell structure containing timestamp/value tuples. The default value is false.
    readRowkeyAsString Boolean
    If field is true, then the rowkey column families will be read and converted to string. Otherwise they are read with BYTES type values and users need to manually cast them with CAST if necessary. The default value is false.
    columnFamilies GetTableExternalDataConfigurationBigtableOptionColumnFamily[]
    A list of column families to expose in the table schema along with their types. This list restricts the column families that can be referenced in queries and specifies their value types. You can use this list to do type conversions - see the 'type' field for more details. If you leave this list empty, all column families are present in the table schema and their values are read as BYTES. During a query only the column families referenced in that query are read from Bigtable.
    ignoreUnspecifiedColumnFamilies boolean
    If field is true, then the column families that are not specified in columnFamilies list are not exposed in the table schema. Otherwise, they are read with BYTES type values. The default value is false.
    outputColumnFamiliesAsJson boolean
    If field is true, then each column family will be read as a single JSON column. Otherwise they are read as a repeated cell structure containing timestamp/value tuples. The default value is false.
    readRowkeyAsString boolean
    If field is true, then the rowkey column families will be read and converted to string. Otherwise they are read with BYTES type values and users need to manually cast them with CAST if necessary. The default value is false.
    column_families Sequence[GetTableExternalDataConfigurationBigtableOptionColumnFamily]
    A list of column families to expose in the table schema along with their types. This list restricts the column families that can be referenced in queries and specifies their value types. You can use this list to do type conversions - see the 'type' field for more details. If you leave this list empty, all column families are present in the table schema and their values are read as BYTES. During a query only the column families referenced in that query are read from Bigtable.
    ignore_unspecified_column_families bool
    If field is true, then the column families that are not specified in columnFamilies list are not exposed in the table schema. Otherwise, they are read with BYTES type values. The default value is false.
    output_column_families_as_json bool
    If field is true, then each column family will be read as a single JSON column. Otherwise they are read as a repeated cell structure containing timestamp/value tuples. The default value is false.
    read_rowkey_as_string bool
    If field is true, then the rowkey column families will be read and converted to string. Otherwise they are read with BYTES type values and users need to manually cast them with CAST if necessary. The default value is false.
    columnFamilies List<Property Map>
    A list of column families to expose in the table schema along with their types. This list restricts the column families that can be referenced in queries and specifies their value types. You can use this list to do type conversions - see the 'type' field for more details. If you leave this list empty, all column families are present in the table schema and their values are read as BYTES. During a query only the column families referenced in that query are read from Bigtable.
    ignoreUnspecifiedColumnFamilies Boolean
    If field is true, then the column families that are not specified in columnFamilies list are not exposed in the table schema. Otherwise, they are read with BYTES type values. The default value is false.
    outputColumnFamiliesAsJson Boolean
    If field is true, then each column family will be read as a single JSON column. Otherwise they are read as a repeated cell structure containing timestamp/value tuples. The default value is false.
    readRowkeyAsString Boolean
    If field is true, then the rowkey column families will be read and converted to string. Otherwise they are read with BYTES type values and users need to manually cast them with CAST if necessary. The default value is false.

    GetTableExternalDataConfigurationBigtableOptionColumnFamily

    Columns List<GetTableExternalDataConfigurationBigtableOptionColumnFamilyColumn>
    A list of columns that should be exposed as individual fields as opposed to a list of (column name, value) pairs. All columns whose qualifier matches a qualifier in this list can be accessed as individual fields; other columns can be accessed as a list through the column field.
    Encoding string
    The encoding of the values when the type is not STRING. Acceptable encoding values are: TEXT - indicates values are alphanumeric text strings. BINARY - indicates values are encoded using HBase Bytes.toBytes family of functions. This can be overridden for a specific column by listing that column in 'columns' and specifying an encoding for it.
    FamilyId string
    Identifier of the column family.
    OnlyReadLatest bool
    If this is set only the latest version of value are exposed for all columns in this column family. This can be overridden for a specific column by listing that column in 'columns' and specifying a different setting for that column.
    Type string
    The type to convert the value in cells of this column family. The values are expected to be encoded using HBase Bytes.toBytes function when using the BINARY encoding value. Following BigQuery types are allowed (case-sensitive): "BYTES", "STRING", "INTEGER", "FLOAT", "BOOLEAN", "JSON". Default type is BYTES. This can be overridden for a specific column by listing that column in 'columns' and specifying a type for it.
    Columns []GetTableExternalDataConfigurationBigtableOptionColumnFamilyColumn
    A list of columns that should be exposed as individual fields as opposed to a list of (column name, value) pairs. All columns whose qualifier matches a qualifier in this list can be accessed as individual fields; other columns can be accessed as a list through the column field.
    Encoding string
    The encoding of the values when the type is not STRING. Acceptable encoding values are: TEXT - indicates values are alphanumeric text strings. BINARY - indicates values are encoded using HBase Bytes.toBytes family of functions. This can be overridden for a specific column by listing that column in 'columns' and specifying an encoding for it.
    FamilyId string
    Identifier of the column family.
    OnlyReadLatest bool
    If this is set only the latest version of value are exposed for all columns in this column family. This can be overridden for a specific column by listing that column in 'columns' and specifying a different setting for that column.
    Type string
    The type to convert the value in cells of this column family. The values are expected to be encoded using HBase Bytes.toBytes function when using the BINARY encoding value. Following BigQuery types are allowed (case-sensitive): "BYTES", "STRING", "INTEGER", "FLOAT", "BOOLEAN", "JSON". Default type is BYTES. This can be overridden for a specific column by listing that column in 'columns' and specifying a type for it.
    columns List<GetTableExternalDataConfigurationBigtableOptionColumnFamilyColumn>
    A list of columns that should be exposed as individual fields as opposed to a list of (column name, value) pairs. All columns whose qualifier matches a qualifier in this list can be accessed as individual fields; other columns can be accessed as a list through the column field.
    encoding String
    The encoding of the values when the type is not STRING. Acceptable encoding values are: TEXT - indicates values are alphanumeric text strings. BINARY - indicates values are encoded using HBase Bytes.toBytes family of functions. This can be overridden for a specific column by listing that column in 'columns' and specifying an encoding for it.
    familyId String
    Identifier of the column family.
    onlyReadLatest Boolean
    If this is set only the latest version of value are exposed for all columns in this column family. This can be overridden for a specific column by listing that column in 'columns' and specifying a different setting for that column.
    type String
    The type to convert the value in cells of this column family. The values are expected to be encoded using HBase Bytes.toBytes function when using the BINARY encoding value. Following BigQuery types are allowed (case-sensitive): "BYTES", "STRING", "INTEGER", "FLOAT", "BOOLEAN", "JSON". Default type is BYTES. This can be overridden for a specific column by listing that column in 'columns' and specifying a type for it.
    columns GetTableExternalDataConfigurationBigtableOptionColumnFamilyColumn[]
    A list of columns that should be exposed as individual fields as opposed to a list of (column name, value) pairs. All columns whose qualifier matches a qualifier in this list can be accessed as individual fields; other columns can be accessed as a list through the column field.
    encoding string
    The encoding of the values when the type is not STRING. Acceptable encoding values are: TEXT - indicates values are alphanumeric text strings. BINARY - indicates values are encoded using HBase Bytes.toBytes family of functions. This can be overridden for a specific column by listing that column in 'columns' and specifying an encoding for it.
    familyId string
    Identifier of the column family.
    onlyReadLatest boolean
    If this is set only the latest version of value are exposed for all columns in this column family. This can be overridden for a specific column by listing that column in 'columns' and specifying a different setting for that column.
    type string
    The type to convert the value in cells of this column family. The values are expected to be encoded using HBase Bytes.toBytes function when using the BINARY encoding value. Following BigQuery types are allowed (case-sensitive): "BYTES", "STRING", "INTEGER", "FLOAT", "BOOLEAN", "JSON". Default type is BYTES. This can be overridden for a specific column by listing that column in 'columns' and specifying a type for it.
    columns Sequence[GetTableExternalDataConfigurationBigtableOptionColumnFamilyColumn]
    A list of columns that should be exposed as individual fields as opposed to a list of (column name, value) pairs. All columns whose qualifier matches a qualifier in this list can be accessed as individual fields; other columns can be accessed as a list through the column field.
    encoding str
    The encoding of the values when the type is not STRING. Acceptable encoding values are: TEXT - indicates values are alphanumeric text strings. BINARY - indicates values are encoded using HBase Bytes.toBytes family of functions. This can be overridden for a specific column by listing that column in 'columns' and specifying an encoding for it.
    family_id str
    Identifier of the column family.
    only_read_latest bool
    If this is set only the latest version of value are exposed for all columns in this column family. This can be overridden for a specific column by listing that column in 'columns' and specifying a different setting for that column.
    type str
    The type to convert the value in cells of this column family. The values are expected to be encoded using HBase Bytes.toBytes function when using the BINARY encoding value. Following BigQuery types are allowed (case-sensitive): "BYTES", "STRING", "INTEGER", "FLOAT", "BOOLEAN", "JSON". Default type is BYTES. This can be overridden for a specific column by listing that column in 'columns' and specifying a type for it.
    columns List<Property Map>
    A list of columns that should be exposed as individual fields as opposed to a list of (column name, value) pairs. All columns whose qualifier matches a qualifier in this list can be accessed as individual fields; other columns can be accessed as a list through the column field.
    encoding String
    The encoding of the values when the type is not STRING. Acceptable encoding values are: TEXT - indicates values are alphanumeric text strings. BINARY - indicates values are encoded using HBase Bytes.toBytes family of functions. This can be overridden for a specific column by listing that column in 'columns' and specifying an encoding for it.
    familyId String
    Identifier of the column family.
    onlyReadLatest Boolean
    If this is set only the latest version of value are exposed for all columns in this column family. This can be overridden for a specific column by listing that column in 'columns' and specifying a different setting for that column.
    type String
    The type to convert the value in cells of this column family. The values are expected to be encoded using HBase Bytes.toBytes function when using the BINARY encoding value. Following BigQuery types are allowed (case-sensitive): "BYTES", "STRING", "INTEGER", "FLOAT", "BOOLEAN", "JSON". Default type is BYTES. This can be overridden for a specific column by listing that column in 'columns' and specifying a type for it.

    GetTableExternalDataConfigurationBigtableOptionColumnFamilyColumn

    Encoding string
    The encoding of the values when the type is not STRING. Acceptable encoding values are: TEXT - indicates values are alphanumeric text strings. BINARY - indicates values are encoded using HBase Bytes.toBytes family of functions. 'encoding' can also be set at the column family level. However, the setting at this level takes precedence if 'encoding' is set at both levels.
    FieldName string
    If the qualifier is not a valid BigQuery field identifier i.e. does not match [a-zA-Z][a-zA-Z0-9_]*, a valid identifier must be provided as the column field name and is used as field name in queries.
    OnlyReadLatest bool
    If this is set, only the latest version of value in this column are exposed. 'onlyReadLatest' can also be set at the column family level. However, the setting at this level takes precedence if 'onlyReadLatest' is set at both levels.
    QualifierEncoded string
    Qualifier of the column. Columns in the parent column family that have this exact qualifier are exposed as individual fields. If the qualifier is a valid UTF-8 string, it can be specified in the qualifierString field. Otherwise, a base-64 encoded value must be set to qualifierEncoded. The column field name is the same as the column qualifier. However, if the qualifier is not a valid BigQuery field identifier i.e. does not match [a-zA-Z][a-zA-Z0-9_]*, a valid identifier must be provided as fieldName.
    QualifierString string
    Qualifier string.
    Type string
    The type to convert the value in cells of this column. The values are expected to be encoded using HBase Bytes.toBytes function when using the BINARY encoding value. Following BigQuery types are allowed (case-sensitive): "BYTES", "STRING", "INTEGER", "FLOAT", "BOOLEAN", "JSON", Default type is "BYTES". 'type' can also be set at the column family level. However, the setting at this level takes precedence if 'type' is set at both levels.
    Encoding string
    The encoding of the values when the type is not STRING. Acceptable encoding values are: TEXT - indicates values are alphanumeric text strings. BINARY - indicates values are encoded using HBase Bytes.toBytes family of functions. 'encoding' can also be set at the column family level. However, the setting at this level takes precedence if 'encoding' is set at both levels.
    FieldName string
    If the qualifier is not a valid BigQuery field identifier i.e. does not match [a-zA-Z][a-zA-Z0-9_]*, a valid identifier must be provided as the column field name and is used as field name in queries.
    OnlyReadLatest bool
    If this is set, only the latest version of value in this column are exposed. 'onlyReadLatest' can also be set at the column family level. However, the setting at this level takes precedence if 'onlyReadLatest' is set at both levels.
    QualifierEncoded string
    Qualifier of the column. Columns in the parent column family that have this exact qualifier are exposed as individual fields. If the qualifier is a valid UTF-8 string, it can be specified in the qualifierString field. Otherwise, a base-64 encoded value must be set to qualifierEncoded. The column field name is the same as the column qualifier. However, if the qualifier is not a valid BigQuery field identifier i.e. does not match [a-zA-Z][a-zA-Z0-9_]*, a valid identifier must be provided as fieldName.
    QualifierString string
    Qualifier string.
    Type string
    The type to convert the value in cells of this column. The values are expected to be encoded using HBase Bytes.toBytes function when using the BINARY encoding value. Following BigQuery types are allowed (case-sensitive): "BYTES", "STRING", "INTEGER", "FLOAT", "BOOLEAN", "JSON", Default type is "BYTES". 'type' can also be set at the column family level. However, the setting at this level takes precedence if 'type' is set at both levels.
    encoding String
    The encoding of the values when the type is not STRING. Acceptable encoding values are: TEXT - indicates values are alphanumeric text strings. BINARY - indicates values are encoded using HBase Bytes.toBytes family of functions. 'encoding' can also be set at the column family level. However, the setting at this level takes precedence if 'encoding' is set at both levels.
    fieldName String
    If the qualifier is not a valid BigQuery field identifier i.e. does not match [a-zA-Z][a-zA-Z0-9_]*, a valid identifier must be provided as the column field name and is used as field name in queries.
    onlyReadLatest Boolean
    If this is set, only the latest version of value in this column are exposed. 'onlyReadLatest' can also be set at the column family level. However, the setting at this level takes precedence if 'onlyReadLatest' is set at both levels.
    qualifierEncoded String
    Qualifier of the column. Columns in the parent column family that have this exact qualifier are exposed as individual fields. If the qualifier is a valid UTF-8 string, it can be specified in the qualifierString field. Otherwise, a base-64 encoded value must be set to qualifierEncoded. The column field name is the same as the column qualifier. However, if the qualifier is not a valid BigQuery field identifier i.e. does not match [a-zA-Z][a-zA-Z0-9_]*, a valid identifier must be provided as fieldName.
    qualifierString String
    Qualifier string.
    type String
    The type to convert the value in cells of this column. The values are expected to be encoded using HBase Bytes.toBytes function when using the BINARY encoding value. Following BigQuery types are allowed (case-sensitive): "BYTES", "STRING", "INTEGER", "FLOAT", "BOOLEAN", "JSON", Default type is "BYTES". 'type' can also be set at the column family level. However, the setting at this level takes precedence if 'type' is set at both levels.
    encoding string
    The encoding of the values when the type is not STRING. Acceptable encoding values are: TEXT - indicates values are alphanumeric text strings. BINARY - indicates values are encoded using HBase Bytes.toBytes family of functions. 'encoding' can also be set at the column family level. However, the setting at this level takes precedence if 'encoding' is set at both levels.
    fieldName string
    If the qualifier is not a valid BigQuery field identifier i.e. does not match [a-zA-Z][a-zA-Z0-9_]*, a valid identifier must be provided as the column field name and is used as field name in queries.
    onlyReadLatest boolean
    If this is set, only the latest version of value in this column are exposed. 'onlyReadLatest' can also be set at the column family level. However, the setting at this level takes precedence if 'onlyReadLatest' is set at both levels.
    qualifierEncoded string
    Qualifier of the column. Columns in the parent column family that have this exact qualifier are exposed as individual fields. If the qualifier is a valid UTF-8 string, it can be specified in the qualifierString field. Otherwise, a base-64 encoded value must be set to qualifierEncoded. The column field name is the same as the column qualifier. However, if the qualifier is not a valid BigQuery field identifier i.e. does not match [a-zA-Z][a-zA-Z0-9_]*, a valid identifier must be provided as fieldName.
    qualifierString string
    Qualifier string.
    type string
    The type to convert the value in cells of this column. The values are expected to be encoded using HBase Bytes.toBytes function when using the BINARY encoding value. Following BigQuery types are allowed (case-sensitive): "BYTES", "STRING", "INTEGER", "FLOAT", "BOOLEAN", "JSON", Default type is "BYTES". 'type' can also be set at the column family level. However, the setting at this level takes precedence if 'type' is set at both levels.
    encoding str
    The encoding of the values when the type is not STRING. Acceptable encoding values are: TEXT - indicates values are alphanumeric text strings. BINARY - indicates values are encoded using HBase Bytes.toBytes family of functions. 'encoding' can also be set at the column family level. However, the setting at this level takes precedence if 'encoding' is set at both levels.
    field_name str
    If the qualifier is not a valid BigQuery field identifier i.e. does not match [a-zA-Z][a-zA-Z0-9_]*, a valid identifier must be provided as the column field name and is used as field name in queries.
    only_read_latest bool
    If this is set, only the latest version of value in this column are exposed. 'onlyReadLatest' can also be set at the column family level. However, the setting at this level takes precedence if 'onlyReadLatest' is set at both levels.
    qualifier_encoded str
    Qualifier of the column. Columns in the parent column family that have this exact qualifier are exposed as individual fields. If the qualifier is a valid UTF-8 string, it can be specified in the qualifierString field. Otherwise, a base-64 encoded value must be set to qualifierEncoded. The column field name is the same as the column qualifier. However, if the qualifier is not a valid BigQuery field identifier i.e. does not match [a-zA-Z][a-zA-Z0-9_]*, a valid identifier must be provided as fieldName.
    qualifier_string str
    Qualifier string.
    type str
    The type to convert the value in cells of this column. The values are expected to be encoded using HBase Bytes.toBytes function when using the BINARY encoding value. Following BigQuery types are allowed (case-sensitive): "BYTES", "STRING", "INTEGER", "FLOAT", "BOOLEAN", "JSON", Default type is "BYTES". 'type' can also be set at the column family level. However, the setting at this level takes precedence if 'type' is set at both levels.
    encoding String
    The encoding of the values when the type is not STRING. Acceptable encoding values are: TEXT - indicates values are alphanumeric text strings. BINARY - indicates values are encoded using HBase Bytes.toBytes family of functions. 'encoding' can also be set at the column family level. However, the setting at this level takes precedence if 'encoding' is set at both levels.
    fieldName String
    If the qualifier is not a valid BigQuery field identifier i.e. does not match [a-zA-Z][a-zA-Z0-9_]*, a valid identifier must be provided as the column field name and is used as field name in queries.
    onlyReadLatest Boolean
    If this is set, only the latest version of value in this column are exposed. 'onlyReadLatest' can also be set at the column family level. However, the setting at this level takes precedence if 'onlyReadLatest' is set at both levels.
    qualifierEncoded String
    Qualifier of the column. Columns in the parent column family that have this exact qualifier are exposed as individual fields. If the qualifier is a valid UTF-8 string, it can be specified in the qualifierString field. Otherwise, a base-64 encoded value must be set to qualifierEncoded. The column field name is the same as the column qualifier. However, if the qualifier is not a valid BigQuery field identifier i.e. does not match [a-zA-Z][a-zA-Z0-9_]*, a valid identifier must be provided as fieldName.
    qualifierString String
    Qualifier string.
    type String
    The type to convert the value in cells of this column. The values are expected to be encoded using HBase Bytes.toBytes function when using the BINARY encoding value. Following BigQuery types are allowed (case-sensitive): "BYTES", "STRING", "INTEGER", "FLOAT", "BOOLEAN", "JSON", Default type is "BYTES". 'type' can also be set at the column family level. However, the setting at this level takes precedence if 'type' is set at both levels.

    GetTableExternalDataConfigurationCsvOption

    AllowJaggedRows bool
    Indicates if BigQuery should accept rows that are missing trailing optional columns.
    AllowQuotedNewlines bool
    Indicates if BigQuery should allow quoted data sections that contain newline characters in a CSV file. The default value is false.
    Encoding string
    The character encoding of the data. The supported values are UTF-8 or ISO-8859-1.
    FieldDelimiter string
    The separator for fields in a CSV file.
    Quote string
    SkipLeadingRows int
    The number of rows at the top of a CSV file that BigQuery will skip when reading the data.
    AllowJaggedRows bool
    Indicates if BigQuery should accept rows that are missing trailing optional columns.
    AllowQuotedNewlines bool
    Indicates if BigQuery should allow quoted data sections that contain newline characters in a CSV file. The default value is false.
    Encoding string
    The character encoding of the data. The supported values are UTF-8 or ISO-8859-1.
    FieldDelimiter string
    The separator for fields in a CSV file.
    Quote string
    SkipLeadingRows int
    The number of rows at the top of a CSV file that BigQuery will skip when reading the data.
    allowJaggedRows Boolean
    Indicates if BigQuery should accept rows that are missing trailing optional columns.
    allowQuotedNewlines Boolean
    Indicates if BigQuery should allow quoted data sections that contain newline characters in a CSV file. The default value is false.
    encoding String
    The character encoding of the data. The supported values are UTF-8 or ISO-8859-1.
    fieldDelimiter String
    The separator for fields in a CSV file.
    quote String
    skipLeadingRows Integer
    The number of rows at the top of a CSV file that BigQuery will skip when reading the data.
    allowJaggedRows boolean
    Indicates if BigQuery should accept rows that are missing trailing optional columns.
    allowQuotedNewlines boolean
    Indicates if BigQuery should allow quoted data sections that contain newline characters in a CSV file. The default value is false.
    encoding string
    The character encoding of the data. The supported values are UTF-8 or ISO-8859-1.
    fieldDelimiter string
    The separator for fields in a CSV file.
    quote string
    skipLeadingRows number
    The number of rows at the top of a CSV file that BigQuery will skip when reading the data.
    allow_jagged_rows bool
    Indicates if BigQuery should accept rows that are missing trailing optional columns.
    allow_quoted_newlines bool
    Indicates if BigQuery should allow quoted data sections that contain newline characters in a CSV file. The default value is false.
    encoding str
    The character encoding of the data. The supported values are UTF-8 or ISO-8859-1.
    field_delimiter str
    The separator for fields in a CSV file.
    quote str
    skip_leading_rows int
    The number of rows at the top of a CSV file that BigQuery will skip when reading the data.
    allowJaggedRows Boolean
    Indicates if BigQuery should accept rows that are missing trailing optional columns.
    allowQuotedNewlines Boolean
    Indicates if BigQuery should allow quoted data sections that contain newline characters in a CSV file. The default value is false.
    encoding String
    The character encoding of the data. The supported values are UTF-8 or ISO-8859-1.
    fieldDelimiter String
    The separator for fields in a CSV file.
    quote String
    skipLeadingRows Number
    The number of rows at the top of a CSV file that BigQuery will skip when reading the data.

    GetTableExternalDataConfigurationGoogleSheetsOption

    Range string
    Range of a sheet to query from. Only used when non-empty. At least one of range or skip_leading_rows must be set. Typical format: "sheet_name!top_left_cell_id:bottom_right_cell_id". For example: "sheet1!A1:B20".
    SkipLeadingRows int
    The number of rows at the top of the sheet that BigQuery will skip when reading the data. At least one of range or skip_leading_rows must be set.
    Range string
    Range of a sheet to query from. Only used when non-empty. At least one of range or skip_leading_rows must be set. Typical format: "sheet_name!top_left_cell_id:bottom_right_cell_id". For example: "sheet1!A1:B20".
    SkipLeadingRows int
    The number of rows at the top of the sheet that BigQuery will skip when reading the data. At least one of range or skip_leading_rows must be set.
    range String
    Range of a sheet to query from. Only used when non-empty. At least one of range or skip_leading_rows must be set. Typical format: "sheet_name!top_left_cell_id:bottom_right_cell_id". For example: "sheet1!A1:B20".
    skipLeadingRows Integer
    The number of rows at the top of the sheet that BigQuery will skip when reading the data. At least one of range or skip_leading_rows must be set.
    range string
    Range of a sheet to query from. Only used when non-empty. At least one of range or skip_leading_rows must be set. Typical format: "sheet_name!top_left_cell_id:bottom_right_cell_id". For example: "sheet1!A1:B20".
    skipLeadingRows number
    The number of rows at the top of the sheet that BigQuery will skip when reading the data. At least one of range or skip_leading_rows must be set.
    range str
    Range of a sheet to query from. Only used when non-empty. At least one of range or skip_leading_rows must be set. Typical format: "sheet_name!top_left_cell_id:bottom_right_cell_id". For example: "sheet1!A1:B20".
    skip_leading_rows int
    The number of rows at the top of the sheet that BigQuery will skip when reading the data. At least one of range or skip_leading_rows must be set.
    range String
    Range of a sheet to query from. Only used when non-empty. At least one of range or skip_leading_rows must be set. Typical format: "sheet_name!top_left_cell_id:bottom_right_cell_id". For example: "sheet1!A1:B20".
    skipLeadingRows Number
    The number of rows at the top of the sheet that BigQuery will skip when reading the data. At least one of range or skip_leading_rows must be set.

    GetTableExternalDataConfigurationHivePartitioningOption

    Mode string
    When set, what mode of hive partitioning to use when reading data.
    RequirePartitionFilter bool
    If set to true, queries over this table require a partition filter that can be used for partition elimination to be specified.
    SourceUriPrefix string
    When hive partition detection is requested, a common prefix for all source URIs is required. The prefix must end immediately before the partition key encoding begins.
    Mode string
    When set, what mode of hive partitioning to use when reading data.
    RequirePartitionFilter bool
    If set to true, queries over this table require a partition filter that can be used for partition elimination to be specified.
    SourceUriPrefix string
    When hive partition detection is requested, a common prefix for all source URIs is required. The prefix must end immediately before the partition key encoding begins.
    mode String
    When set, what mode of hive partitioning to use when reading data.
    requirePartitionFilter Boolean
    If set to true, queries over this table require a partition filter that can be used for partition elimination to be specified.
    sourceUriPrefix String
    When hive partition detection is requested, a common prefix for all source URIs is required. The prefix must end immediately before the partition key encoding begins.
    mode string
    When set, what mode of hive partitioning to use when reading data.
    requirePartitionFilter boolean
    If set to true, queries over this table require a partition filter that can be used for partition elimination to be specified.
    sourceUriPrefix string
    When hive partition detection is requested, a common prefix for all source URIs is required. The prefix must end immediately before the partition key encoding begins.
    mode str
    When set, what mode of hive partitioning to use when reading data.
    require_partition_filter bool
    If set to true, queries over this table require a partition filter that can be used for partition elimination to be specified.
    source_uri_prefix str
    When hive partition detection is requested, a common prefix for all source URIs is required. The prefix must end immediately before the partition key encoding begins.
    mode String
    When set, what mode of hive partitioning to use when reading data.
    requirePartitionFilter Boolean
    If set to true, queries over this table require a partition filter that can be used for partition elimination to be specified.
    sourceUriPrefix String
    When hive partition detection is requested, a common prefix for all source URIs is required. The prefix must end immediately before the partition key encoding begins.

    GetTableExternalDataConfigurationJsonOption

    Encoding string
    The character encoding of the data. The supported values are UTF-8, UTF-16BE, UTF-16LE, UTF-32BE, and UTF-32LE. The default value is UTF-8.
    Encoding string
    The character encoding of the data. The supported values are UTF-8, UTF-16BE, UTF-16LE, UTF-32BE, and UTF-32LE. The default value is UTF-8.
    encoding String
    The character encoding of the data. The supported values are UTF-8, UTF-16BE, UTF-16LE, UTF-32BE, and UTF-32LE. The default value is UTF-8.
    encoding string
    The character encoding of the data. The supported values are UTF-8, UTF-16BE, UTF-16LE, UTF-32BE, and UTF-32LE. The default value is UTF-8.
    encoding str
    The character encoding of the data. The supported values are UTF-8, UTF-16BE, UTF-16LE, UTF-32BE, and UTF-32LE. The default value is UTF-8.
    encoding String
    The character encoding of the data. The supported values are UTF-8, UTF-16BE, UTF-16LE, UTF-32BE, and UTF-32LE. The default value is UTF-8.

    GetTableExternalDataConfigurationParquetOption

    EnableListInference bool
    Indicates whether to use schema inference specifically for Parquet LIST logical type.
    EnumAsString bool
    Indicates whether to infer Parquet ENUM logical type as STRING instead of BYTES by default.
    EnableListInference bool
    Indicates whether to use schema inference specifically for Parquet LIST logical type.
    EnumAsString bool
    Indicates whether to infer Parquet ENUM logical type as STRING instead of BYTES by default.
    enableListInference Boolean
    Indicates whether to use schema inference specifically for Parquet LIST logical type.
    enumAsString Boolean
    Indicates whether to infer Parquet ENUM logical type as STRING instead of BYTES by default.
    enableListInference boolean
    Indicates whether to use schema inference specifically for Parquet LIST logical type.
    enumAsString boolean
    Indicates whether to infer Parquet ENUM logical type as STRING instead of BYTES by default.
    enable_list_inference bool
    Indicates whether to use schema inference specifically for Parquet LIST logical type.
    enum_as_string bool
    Indicates whether to infer Parquet ENUM logical type as STRING instead of BYTES by default.
    enableListInference Boolean
    Indicates whether to use schema inference specifically for Parquet LIST logical type.
    enumAsString Boolean
    Indicates whether to infer Parquet ENUM logical type as STRING instead of BYTES by default.

    GetTableMaterializedView

    AllowNonIncrementalDefinition bool
    Allow non incremental materialized view definition. The default value is false.
    EnableRefresh bool
    Specifies if BigQuery should automatically refresh materialized view when the base table is updated. The default is true.
    Query string
    A query whose result is persisted.
    RefreshIntervalMs int
    Specifies maximum frequency at which this materialized view will be refreshed. The default is 1800000.
    AllowNonIncrementalDefinition bool
    Allow non-incremental materialized view definition. The default value is false.
    EnableRefresh bool
    Specifies if BigQuery should automatically refresh materialized view when the base table is updated. The default is true.
    Query string
    A query whose result is persisted.
    RefreshIntervalMs int
    Specifies maximum frequency at which this materialized view will be refreshed. The default is 1800000.
    allowNonIncrementalDefinition Boolean
    Allow non-incremental materialized view definition. The default value is false.
    enableRefresh Boolean
    Specifies if BigQuery should automatically refresh materialized view when the base table is updated. The default is true.
    query String
    A query whose result is persisted.
    refreshIntervalMs Integer
    Specifies maximum frequency at which this materialized view will be refreshed. The default is 1800000.
    allowNonIncrementalDefinition boolean
    Allow non-incremental materialized view definition. The default value is false.
    enableRefresh boolean
    Specifies if BigQuery should automatically refresh materialized view when the base table is updated. The default is true.
    query string
    A query whose result is persisted.
    refreshIntervalMs number
    Specifies maximum frequency at which this materialized view will be refreshed. The default is 1800000.
    allow_non_incremental_definition bool
    Allow non-incremental materialized view definition. The default value is false.
    enable_refresh bool
    Specifies if BigQuery should automatically refresh materialized view when the base table is updated. The default is true.
    query str
    A query whose result is persisted.
    refresh_interval_ms int
    Specifies maximum frequency at which this materialized view will be refreshed. The default is 1800000.
    allowNonIncrementalDefinition Boolean
    Allow non-incremental materialized view definition. The default value is false.
    enableRefresh Boolean
    Specifies if BigQuery should automatically refresh materialized view when the base table is updated. The default is true.
    query String
    A query whose result is persisted.
    refreshIntervalMs Number
    Specifies maximum frequency at which this materialized view will be refreshed. The default is 1800000.

    GetTableRangePartitioning

    Field string
    The field used to determine how to create a range-based partition.
    Ranges List<GetTableRangePartitioningRange>
    Information required to partition based on ranges. Structure is documented below.
    Field string
    The field used to determine how to create a range-based partition.
    Ranges []GetTableRangePartitioningRange
    Information required to partition based on ranges. Structure is documented below.
    field String
    The field used to determine how to create a range-based partition.
    ranges List<GetTableRangePartitioningRange>
    Information required to partition based on ranges. Structure is documented below.
    field string
    The field used to determine how to create a range-based partition.
    ranges GetTableRangePartitioningRange[]
    Information required to partition based on ranges. Structure is documented below.
    field str
    The field used to determine how to create a range-based partition.
    ranges Sequence[GetTableRangePartitioningRange]
    Information required to partition based on ranges. Structure is documented below.
    field String
    The field used to determine how to create a range-based partition.
    ranges List<Property Map>
    Information required to partition based on ranges. Structure is documented below.

    GetTableRangePartitioningRange

    End int
    End of the range partitioning, exclusive.
    Interval int
    The width of each range within the partition.
    Start int
    Start of the range partitioning, inclusive.
    End int
    End of the range partitioning, exclusive.
    Interval int
    The width of each range within the partition.
    Start int
    Start of the range partitioning, inclusive.
    end Integer
    End of the range partitioning, exclusive.
    interval Integer
    The width of each range within the partition.
    start Integer
    Start of the range partitioning, inclusive.
    end number
    End of the range partitioning, exclusive.
    interval number
    The width of each range within the partition.
    start number
    Start of the range partitioning, inclusive.
    end int
    End of the range partitioning, exclusive.
    interval int
    The width of each range within the partition.
    start int
    Start of the range partitioning, inclusive.
    end Number
    End of the range partitioning, exclusive.
    interval Number
    The width of each range within the partition.
    start Number
    Start of the range partitioning, inclusive.

    GetTableSchemaForeignTypeInfo

    TypeSystem string
    Specifies the system which defines the foreign data type.
    TypeSystem string
    Specifies the system which defines the foreign data type.
    typeSystem String
    Specifies the system which defines the foreign data type.
    typeSystem string
    Specifies the system which defines the foreign data type.
    type_system str
    Specifies the system which defines the foreign data type.
    typeSystem String
    Specifies the system which defines the foreign data type.

    GetTableTableConstraint

    ForeignKeys List<GetTableTableConstraintForeignKey>
    Present only if the table has a foreign key. The foreign key is not enforced.
    PrimaryKeys List<GetTableTableConstraintPrimaryKey>
    Represents a primary key constraint on a table's columns. Present only if the table has a primary key. The primary key is not enforced.
    ForeignKeys []GetTableTableConstraintForeignKey
    Present only if the table has a foreign key. The foreign key is not enforced.
    PrimaryKeys []GetTableTableConstraintPrimaryKey
    Represents a primary key constraint on a table's columns. Present only if the table has a primary key. The primary key is not enforced.
    foreignKeys List<GetTableTableConstraintForeignKey>
    Present only if the table has a foreign key. The foreign key is not enforced.
    primaryKeys List<GetTableTableConstraintPrimaryKey>
    Represents a primary key constraint on a table's columns. Present only if the table has a primary key. The primary key is not enforced.
    foreignKeys GetTableTableConstraintForeignKey[]
    Present only if the table has a foreign key. The foreign key is not enforced.
    primaryKeys GetTableTableConstraintPrimaryKey[]
    Represents a primary key constraint on a table's columns. Present only if the table has a primary key. The primary key is not enforced.
    foreign_keys Sequence[GetTableTableConstraintForeignKey]
    Present only if the table has a foreign key. The foreign key is not enforced.
    primary_keys Sequence[GetTableTableConstraintPrimaryKey]
    Represents a primary key constraint on a table's columns. Present only if the table has a primary key. The primary key is not enforced.
    foreignKeys List<Property Map>
    Present only if the table has a foreign key. The foreign key is not enforced.
    primaryKeys List<Property Map>
    Represents a primary key constraint on a table's columns. Present only if the table has a primary key. The primary key is not enforced.

    GetTableTableConstraintForeignKey

    ColumnReferences List<GetTableTableConstraintForeignKeyColumnReference>
    The pair of the foreign key column and primary key column.
    Name string
    Set only if the foreign key constraint is named.
    ReferencedTables List<GetTableTableConstraintForeignKeyReferencedTable>
    The table that holds the primary key and is referenced by this foreign key.
    ColumnReferences []GetTableTableConstraintForeignKeyColumnReference
    The pair of the foreign key column and primary key column.
    Name string
    Set only if the foreign key constraint is named.
    ReferencedTables []GetTableTableConstraintForeignKeyReferencedTable
    The table that holds the primary key and is referenced by this foreign key.
    columnReferences List<GetTableTableConstraintForeignKeyColumnReference>
    The pair of the foreign key column and primary key column.
    name String
    Set only if the foreign key constraint is named.
    referencedTables List<GetTableTableConstraintForeignKeyReferencedTable>
    The table that holds the primary key and is referenced by this foreign key.
    columnReferences GetTableTableConstraintForeignKeyColumnReference[]
    The pair of the foreign key column and primary key column.
    name string
    Set only if the foreign key constraint is named.
    referencedTables GetTableTableConstraintForeignKeyReferencedTable[]
    The table that holds the primary key and is referenced by this foreign key.
    column_references Sequence[GetTableTableConstraintForeignKeyColumnReference]
    The pair of the foreign key column and primary key column.
    name str
    Set only if the foreign key constraint is named.
    referenced_tables Sequence[GetTableTableConstraintForeignKeyReferencedTable]
    The table that holds the primary key and is referenced by this foreign key.
    columnReferences List<Property Map>
    The pair of the foreign key column and primary key column.
    name String
    Set only if the foreign key constraint is named.
    referencedTables List<Property Map>
    The table that holds the primary key and is referenced by this foreign key.

    GetTableTableConstraintForeignKeyColumnReference

    ReferencedColumn string
    The column in the primary key that is referenced by the referencingColumn.
    ReferencingColumn string
    The column that composes the foreign key.
    ReferencedColumn string
    The column in the primary key that is referenced by the referencingColumn.
    ReferencingColumn string
    The column that composes the foreign key.
    referencedColumn String
    The column in the primary key that is referenced by the referencingColumn.
    referencingColumn String
    The column that composes the foreign key.
    referencedColumn string
    The column in the primary key that is referenced by the referencingColumn.
    referencingColumn string
    The column that composes the foreign key.
    referenced_column str
    The column in the primary key that is referenced by the referencingColumn.
    referencing_column str
    The column that composes the foreign key.
    referencedColumn String
    The column in the primary key that is referenced by the referencingColumn.
    referencingColumn String
    The column that composes the foreign key.

    GetTableTableConstraintForeignKeyReferencedTable

    DatasetId string
    The dataset ID.
    ProjectId string
    The ID of the project containing this table.
    TableId string
    The table ID.
    DatasetId string
    The dataset ID.
    ProjectId string
    The ID of the project containing this table.
    TableId string
    The table ID.
    datasetId String
    The dataset ID.
    projectId String
    The ID of the project containing this table.
    tableId String
    The table ID.
    datasetId string
    The dataset ID.
    projectId string
    The ID of the project containing this table.
    tableId string
    The table ID.
    dataset_id str
    The dataset ID.
    project_id str
    The ID of the project containing this table.
    table_id str
    The table ID.
    datasetId String
    The dataset ID.
    projectId String
    The ID of the project containing this table.
    tableId String
    The table ID.

    GetTableTableConstraintPrimaryKey

    Columns List<string>
    The columns that compose the primary key constraint.
    Columns []string
    The columns that compose the primary key constraint.
    columns List<String>
    The columns that compose the primary key constraint.
    columns string[]
    The columns that compose the primary key constraint.
    columns Sequence[str]
    The columns that compose the primary key constraint.
    columns List<String>
    The columns that compose the primary key constraint.

    GetTableTableReplicationInfo

    ReplicationIntervalMs int
    The interval at which the source materialized view is polled for updates. The default is 300000.
    SourceDatasetId string
    The ID of the source dataset.
    SourceProjectId string
    The ID of the source project.
    SourceTableId string
    The ID of the source materialized view.
    ReplicationIntervalMs int
    The interval at which the source materialized view is polled for updates. The default is 300000.
    SourceDatasetId string
    The ID of the source dataset.
    SourceProjectId string
    The ID of the source project.
    SourceTableId string
    The ID of the source materialized view.
    replicationIntervalMs Integer
    The interval at which the source materialized view is polled for updates. The default is 300000.
    sourceDatasetId String
    The ID of the source dataset.
    sourceProjectId String
    The ID of the source project.
    sourceTableId String
    The ID of the source materialized view.
    replicationIntervalMs number
    The interval at which the source materialized view is polled for updates. The default is 300000.
    sourceDatasetId string
    The ID of the source dataset.
    sourceProjectId string
    The ID of the source project.
    sourceTableId string
    The ID of the source materialized view.
    replication_interval_ms int
    The interval at which the source materialized view is polled for updates. The default is 300000.
    source_dataset_id str
    The ID of the source dataset.
    source_project_id str
    The ID of the source project.
    source_table_id str
    The ID of the source materialized view.
    replicationIntervalMs Number
    The interval at which the source materialized view is polled for updates. The default is 300000.
    sourceDatasetId String
    The ID of the source dataset.
    sourceProjectId String
    The ID of the source project.
    sourceTableId String
    The ID of the source materialized view.

    GetTableTimePartitioning

    ExpirationMs int
    Number of milliseconds for which to keep the storage for a partition.
    Field string
    The field used to determine how to create a time-based partition. If time-based partitioning is enabled without this value, the table is partitioned based on the load time.
    RequirePartitionFilter bool
    If set to true, queries over this table require a partition filter that can be used for partition elimination to be specified.
    Type string
    The supported types are DAY, HOUR, MONTH, and YEAR, which will generate one partition per day, hour, month, and year, respectively.
    ExpirationMs int
    Number of milliseconds for which to keep the storage for a partition.
    Field string
    The field used to determine how to create a time-based partition. If time-based partitioning is enabled without this value, the table is partitioned based on the load time.
    RequirePartitionFilter bool
    If set to true, queries over this table require a partition filter that can be used for partition elimination to be specified.
    Type string
    The supported types are DAY, HOUR, MONTH, and YEAR, which will generate one partition per day, hour, month, and year, respectively.
    expirationMs Integer
    Number of milliseconds for which to keep the storage for a partition.
    field String
    The field used to determine how to create a time-based partition. If time-based partitioning is enabled without this value, the table is partitioned based on the load time.
    requirePartitionFilter Boolean
    If set to true, queries over this table require a partition filter that can be used for partition elimination to be specified.
    type String
    The supported types are DAY, HOUR, MONTH, and YEAR, which will generate one partition per day, hour, month, and year, respectively.
    expirationMs number
    Number of milliseconds for which to keep the storage for a partition.
    field string
    The field used to determine how to create a time-based partition. If time-based partitioning is enabled without this value, the table is partitioned based on the load time.
    requirePartitionFilter boolean
    If set to true, queries over this table require a partition filter that can be used for partition elimination to be specified.
    type string
    The supported types are DAY, HOUR, MONTH, and YEAR, which will generate one partition per day, hour, month, and year, respectively.
    expiration_ms int
    Number of milliseconds for which to keep the storage for a partition.
    field str
    The field used to determine how to create a time-based partition. If time-based partitioning is enabled without this value, the table is partitioned based on the load time.
    require_partition_filter bool
    If set to true, queries over this table require a partition filter that can be used for partition elimination to be specified.
    type str
    The supported types are DAY, HOUR, MONTH, and YEAR, which will generate one partition per day, hour, month, and year, respectively.
    expirationMs Number
    Number of milliseconds for which to keep the storage for a partition.
    field String
    The field used to determine how to create a time-based partition. If time-based partitioning is enabled without this value, the table is partitioned based on the load time.
    requirePartitionFilter Boolean
    If set to true, queries over this table require a partition filter that can be used for partition elimination to be specified.
    type String
    The supported types are DAY, HOUR, MONTH, and YEAR, which will generate one partition per day, hour, month, and year, respectively.

    GetTableView

    Query string
    A query that BigQuery executes when the view is referenced.
    UseLegacySql bool
    Specifies whether to use BigQuery's legacy SQL for this view. The default value is true. If set to false, the view will use BigQuery's standard SQL.
    Query string
    A query that BigQuery executes when the view is referenced.
    UseLegacySql bool
    Specifies whether to use BigQuery's legacy SQL for this view. The default value is true. If set to false, the view will use BigQuery's standard SQL.
    query String
    A query that BigQuery executes when the view is referenced.
    useLegacySql Boolean
    Specifies whether to use BigQuery's legacy SQL for this view. The default value is true. If set to false, the view will use BigQuery's standard SQL.
    query string
    A query that BigQuery executes when the view is referenced.
    useLegacySql boolean
    Specifies whether to use BigQuery's legacy SQL for this view. The default value is true. If set to false, the view will use BigQuery's standard SQL.
    query str
    A query that BigQuery executes when the view is referenced.
    use_legacy_sql bool
    Specifies whether to use BigQuery's legacy SQL for this view. The default value is true. If set to false, the view will use BigQuery's standard SQL.
    query String
    A query that BigQuery executes when the view is referenced.
    useLegacySql Boolean
    Specifies whether to use BigQuery's legacy SQL for this view. The default value is true. If set to false, the view will use BigQuery's standard SQL.

    Package Details

    Repository
    Google Cloud (GCP) Classic pulumi/pulumi-gcp
    License
    Apache-2.0
    Notes
    This Pulumi package is based on the google-beta Terraform Provider.
    gcp logo
    Google Cloud v8.34.0 published on Wednesday, Jun 11, 2025 by Pulumi