gcp.bigquery.Dataset

Google Cloud Classic v7.19.0 published on Thursday, Apr 18, 2024 by Pulumi

    Example Usage

    Bigquery Dataset Basic

    import * as pulumi from "@pulumi/pulumi";
    import * as gcp from "@pulumi/gcp";
    
    const bqowner = new gcp.serviceaccount.Account("bqowner", {accountId: "bqowner"});
    const dataset = new gcp.bigquery.Dataset("dataset", {
        datasetId: "example_dataset",
        friendlyName: "test",
        description: "This is a test description",
        location: "EU",
        defaultTableExpirationMs: 3600000,
        labels: {
            env: "default",
        },
        accesses: [
            {
                role: "OWNER",
                userByEmail: bqowner.email,
            },
            {
                role: "READER",
                domain: "hashicorp.com",
            },
        ],
    });
    
    import pulumi
    import pulumi_gcp as gcp
    
    bqowner = gcp.serviceaccount.Account("bqowner", account_id="bqowner")
    dataset = gcp.bigquery.Dataset("dataset",
        dataset_id="example_dataset",
        friendly_name="test",
        description="This is a test description",
        location="EU",
        default_table_expiration_ms=3600000,
        labels={
            "env": "default",
        },
        accesses=[
            gcp.bigquery.DatasetAccessArgs(
                role="OWNER",
                user_by_email=bqowner.email,
            ),
            gcp.bigquery.DatasetAccessArgs(
                role="READER",
                domain="hashicorp.com",
            ),
        ])
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/bigquery"
    	"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/serviceaccount"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		bqowner, err := serviceaccount.NewAccount(ctx, "bqowner", &serviceaccount.AccountArgs{
    			AccountId: pulumi.String("bqowner"),
    		})
    		if err != nil {
    			return err
    		}
    		_, err = bigquery.NewDataset(ctx, "dataset", &bigquery.DatasetArgs{
    			DatasetId:                pulumi.String("example_dataset"),
    			FriendlyName:             pulumi.String("test"),
    			Description:              pulumi.String("This is a test description"),
    			Location:                 pulumi.String("EU"),
    			DefaultTableExpirationMs: pulumi.Int(3600000),
    			Labels: pulumi.StringMap{
    				"env": pulumi.String("default"),
    			},
    			Accesses: bigquery.DatasetAccessTypeArray{
    				&bigquery.DatasetAccessTypeArgs{
    					Role:        pulumi.String("OWNER"),
    					UserByEmail: bqowner.Email,
    				},
    				&bigquery.DatasetAccessTypeArgs{
    					Role:   pulumi.String("READER"),
    					Domain: pulumi.String("hashicorp.com"),
    				},
    			},
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Gcp = Pulumi.Gcp;
    
    return await Deployment.RunAsync(() => 
    {
        var bqowner = new Gcp.ServiceAccount.Account("bqowner", new()
        {
            AccountId = "bqowner",
        });
    
        var dataset = new Gcp.BigQuery.Dataset("dataset", new()
        {
            DatasetId = "example_dataset",
            FriendlyName = "test",
            Description = "This is a test description",
            Location = "EU",
            DefaultTableExpirationMs = 3600000,
            Labels = 
            {
                { "env", "default" },
            },
            Accesses = new[]
            {
                new Gcp.BigQuery.Inputs.DatasetAccessArgs
                {
                    Role = "OWNER",
                    UserByEmail = bqowner.Email,
                },
                new Gcp.BigQuery.Inputs.DatasetAccessArgs
                {
                    Role = "READER",
                    Domain = "hashicorp.com",
                },
            },
        });
    
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.gcp.serviceaccount.Account;
    import com.pulumi.gcp.serviceaccount.AccountArgs;
    import com.pulumi.gcp.bigquery.Dataset;
    import com.pulumi.gcp.bigquery.DatasetArgs;
    import com.pulumi.gcp.bigquery.inputs.DatasetAccessArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var bqowner = new Account("bqowner", AccountArgs.builder()        
                .accountId("bqowner")
                .build());
    
            var dataset = new Dataset("dataset", DatasetArgs.builder()        
                .datasetId("example_dataset")
                .friendlyName("test")
                .description("This is a test description")
                .location("EU")
                .defaultTableExpirationMs(3600000)
                .labels(Map.of("env", "default"))
                .accesses(            
                    DatasetAccessArgs.builder()
                        .role("OWNER")
                        .userByEmail(bqowner.email())
                        .build(),
                    DatasetAccessArgs.builder()
                        .role("READER")
                        .domain("hashicorp.com")
                        .build())
                .build());
    
        }
    }
    
    resources:
      dataset:
        type: gcp:bigquery:Dataset
        properties:
          datasetId: example_dataset
          friendlyName: test
          description: This is a test description
          location: EU
          defaultTableExpirationMs: 3600000
          labels:
            env: default
          accesses:
            - role: OWNER
              userByEmail: ${bqowner.email}
            - role: READER
              domain: hashicorp.com
      bqowner:
        type: gcp:serviceaccount:Account
        properties:
          accountId: bqowner
    

    Bigquery Dataset Cmek

    import * as pulumi from "@pulumi/pulumi";
    import * as gcp from "@pulumi/gcp";
    
    const keyRing = new gcp.kms.KeyRing("key_ring", {
        name: "example-keyring",
        location: "us",
    });
    const cryptoKey = new gcp.kms.CryptoKey("crypto_key", {
        name: "example-key",
        keyRing: keyRing.id,
    });
    const dataset = new gcp.bigquery.Dataset("dataset", {
        datasetId: "example_dataset",
        friendlyName: "test",
        description: "This is a test description",
        location: "US",
        defaultTableExpirationMs: 3600000,
        defaultEncryptionConfiguration: {
            kmsKeyName: cryptoKey.id,
        },
    });
    
    import pulumi
    import pulumi_gcp as gcp
    
    key_ring = gcp.kms.KeyRing("key_ring",
        name="example-keyring",
        location="us")
    crypto_key = gcp.kms.CryptoKey("crypto_key",
        name="example-key",
        key_ring=key_ring.id)
    dataset = gcp.bigquery.Dataset("dataset",
        dataset_id="example_dataset",
        friendly_name="test",
        description="This is a test description",
        location="US",
        default_table_expiration_ms=3600000,
        default_encryption_configuration=gcp.bigquery.DatasetDefaultEncryptionConfigurationArgs(
            kms_key_name=crypto_key.id,
        ))
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/bigquery"
    	"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/kms"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		keyRing, err := kms.NewKeyRing(ctx, "key_ring", &kms.KeyRingArgs{
    			Name:     pulumi.String("example-keyring"),
    			Location: pulumi.String("us"),
    		})
    		if err != nil {
    			return err
    		}
    		cryptoKey, err := kms.NewCryptoKey(ctx, "crypto_key", &kms.CryptoKeyArgs{
    			Name:    pulumi.String("example-key"),
    			KeyRing: keyRing.ID(),
    		})
    		if err != nil {
    			return err
    		}
    		_, err = bigquery.NewDataset(ctx, "dataset", &bigquery.DatasetArgs{
    			DatasetId:                pulumi.String("example_dataset"),
    			FriendlyName:             pulumi.String("test"),
    			Description:              pulumi.String("This is a test description"),
    			Location:                 pulumi.String("US"),
    			DefaultTableExpirationMs: pulumi.Int(3600000),
    			DefaultEncryptionConfiguration: &bigquery.DatasetDefaultEncryptionConfigurationArgs{
    				KmsKeyName: cryptoKey.ID(),
    			},
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Gcp = Pulumi.Gcp;
    
    return await Deployment.RunAsync(() => 
    {
        var keyRing = new Gcp.Kms.KeyRing("key_ring", new()
        {
            Name = "example-keyring",
            Location = "us",
        });
    
        var cryptoKey = new Gcp.Kms.CryptoKey("crypto_key", new()
        {
            Name = "example-key",
            KeyRing = keyRing.Id,
        });
    
        var dataset = new Gcp.BigQuery.Dataset("dataset", new()
        {
            DatasetId = "example_dataset",
            FriendlyName = "test",
            Description = "This is a test description",
            Location = "US",
            DefaultTableExpirationMs = 3600000,
            DefaultEncryptionConfiguration = new Gcp.BigQuery.Inputs.DatasetDefaultEncryptionConfigurationArgs
            {
                KmsKeyName = cryptoKey.Id,
            },
        });
    
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.gcp.kms.KeyRing;
    import com.pulumi.gcp.kms.KeyRingArgs;
    import com.pulumi.gcp.kms.CryptoKey;
    import com.pulumi.gcp.kms.CryptoKeyArgs;
    import com.pulumi.gcp.bigquery.Dataset;
    import com.pulumi.gcp.bigquery.DatasetArgs;
    import com.pulumi.gcp.bigquery.inputs.DatasetDefaultEncryptionConfigurationArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var keyRing = new KeyRing("keyRing", KeyRingArgs.builder()        
                .name("example-keyring")
                .location("us")
                .build());
    
            var cryptoKey = new CryptoKey("cryptoKey", CryptoKeyArgs.builder()        
                .name("example-key")
                .keyRing(keyRing.id())
                .build());
    
            var dataset = new Dataset("dataset", DatasetArgs.builder()        
                .datasetId("example_dataset")
                .friendlyName("test")
                .description("This is a test description")
                .location("US")
                .defaultTableExpirationMs(3600000)
                .defaultEncryptionConfiguration(DatasetDefaultEncryptionConfigurationArgs.builder()
                    .kmsKeyName(cryptoKey.id())
                    .build())
                .build());
    
        }
    }
    
    resources:
      dataset:
        type: gcp:bigquery:Dataset
        properties:
          datasetId: example_dataset
          friendlyName: test
          description: This is a test description
          location: US
          defaultTableExpirationMs: 3600000
          defaultEncryptionConfiguration:
            kmsKeyName: ${cryptoKey.id}
      cryptoKey:
        type: gcp:kms:CryptoKey
        name: crypto_key
        properties:
          name: example-key
          keyRing: ${keyRing.id}
      keyRing:
        type: gcp:kms:KeyRing
        name: key_ring
        properties:
          name: example-keyring
          location: us
    

    Bigquery Dataset Authorized Dataset

    import * as pulumi from "@pulumi/pulumi";
    import * as gcp from "@pulumi/gcp";
    
    const bqowner = new gcp.serviceaccount.Account("bqowner", {accountId: "bqowner"});
    const _public = new gcp.bigquery.Dataset("public", {
        datasetId: "public",
        friendlyName: "test",
        description: "This dataset is public",
        location: "EU",
        defaultTableExpirationMs: 3600000,
        labels: {
            env: "default",
        },
        accesses: [
            {
                role: "OWNER",
                userByEmail: bqowner.email,
            },
            {
                role: "READER",
                domain: "hashicorp.com",
            },
        ],
    });
    const dataset = new gcp.bigquery.Dataset("dataset", {
        datasetId: "private",
        friendlyName: "test",
        description: "This dataset is private",
        location: "EU",
        defaultTableExpirationMs: 3600000,
        labels: {
            env: "default",
        },
        accesses: [
            {
                role: "OWNER",
                userByEmail: bqowner.email,
            },
            {
                role: "READER",
                domain: "hashicorp.com",
            },
            {
                dataset: {
                    dataset: {
                        projectId: _public.project,
                        datasetId: _public.datasetId,
                    },
                    targetTypes: ["VIEWS"],
                },
            },
        ],
    });
    
    import pulumi
    import pulumi_gcp as gcp
    
    bqowner = gcp.serviceaccount.Account("bqowner", account_id="bqowner")
    public = gcp.bigquery.Dataset("public",
        dataset_id="public",
        friendly_name="test",
        description="This dataset is public",
        location="EU",
        default_table_expiration_ms=3600000,
        labels={
            "env": "default",
        },
        accesses=[
            gcp.bigquery.DatasetAccessArgs(
                role="OWNER",
                user_by_email=bqowner.email,
            ),
            gcp.bigquery.DatasetAccessArgs(
                role="READER",
                domain="hashicorp.com",
            ),
        ])
    dataset = gcp.bigquery.Dataset("dataset",
        dataset_id="private",
        friendly_name="test",
        description="This dataset is private",
        location="EU",
        default_table_expiration_ms=3600000,
        labels={
            "env": "default",
        },
        accesses=[
            gcp.bigquery.DatasetAccessArgs(
                role="OWNER",
                user_by_email=bqowner.email,
            ),
            gcp.bigquery.DatasetAccessArgs(
                role="READER",
                domain="hashicorp.com",
            ),
            gcp.bigquery.DatasetAccessArgs(
                dataset=gcp.bigquery.DatasetAccessDatasetArgs(
                    dataset=gcp.bigquery.DatasetAccessDatasetDatasetArgs(
                        project_id=public.project,
                        dataset_id=public.dataset_id,
                    ),
                    target_types=["VIEWS"],
                ),
            ),
        ])
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/bigquery"
    	"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/serviceaccount"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		bqowner, err := serviceaccount.NewAccount(ctx, "bqowner", &serviceaccount.AccountArgs{
    			AccountId: pulumi.String("bqowner"),
    		})
    		if err != nil {
    			return err
    		}
    		public, err := bigquery.NewDataset(ctx, "public", &bigquery.DatasetArgs{
    			DatasetId:                pulumi.String("public"),
    			FriendlyName:             pulumi.String("test"),
    			Description:              pulumi.String("This dataset is public"),
    			Location:                 pulumi.String("EU"),
    			DefaultTableExpirationMs: pulumi.Int(3600000),
    			Labels: pulumi.StringMap{
    				"env": pulumi.String("default"),
    			},
    			Accesses: bigquery.DatasetAccessTypeArray{
    				&bigquery.DatasetAccessTypeArgs{
    					Role:        pulumi.String("OWNER"),
    					UserByEmail: bqowner.Email,
    				},
    				&bigquery.DatasetAccessTypeArgs{
    					Role:   pulumi.String("READER"),
    					Domain: pulumi.String("hashicorp.com"),
    				},
    			},
    		})
    		if err != nil {
    			return err
    		}
    		_, err = bigquery.NewDataset(ctx, "dataset", &bigquery.DatasetArgs{
    			DatasetId:                pulumi.String("private"),
    			FriendlyName:             pulumi.String("test"),
    			Description:              pulumi.String("This dataset is private"),
    			Location:                 pulumi.String("EU"),
    			DefaultTableExpirationMs: pulumi.Int(3600000),
    			Labels: pulumi.StringMap{
    				"env": pulumi.String("default"),
    			},
    			Accesses: bigquery.DatasetAccessTypeArray{
    				&bigquery.DatasetAccessTypeArgs{
    					Role:        pulumi.String("OWNER"),
    					UserByEmail: bqowner.Email,
    				},
    				&bigquery.DatasetAccessTypeArgs{
    					Role:   pulumi.String("READER"),
    					Domain: pulumi.String("hashicorp.com"),
    				},
    				&bigquery.DatasetAccessTypeArgs{
    					Dataset: &bigquery.DatasetAccessDatasetArgs{
    						Dataset: &bigquery.DatasetAccessDatasetDatasetArgs{
    							ProjectId: public.Project,
    							DatasetId: public.DatasetId,
    						},
    						TargetTypes: pulumi.StringArray{
    							pulumi.String("VIEWS"),
    						},
    					},
    				},
    			},
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Gcp = Pulumi.Gcp;
    
    return await Deployment.RunAsync(() => 
    {
        var bqowner = new Gcp.ServiceAccount.Account("bqowner", new()
        {
            AccountId = "bqowner",
        });
    
        var @public = new Gcp.BigQuery.Dataset("public", new()
        {
            DatasetId = "public",
            FriendlyName = "test",
            Description = "This dataset is public",
            Location = "EU",
            DefaultTableExpirationMs = 3600000,
            Labels = 
            {
                { "env", "default" },
            },
            Accesses = new[]
            {
                new Gcp.BigQuery.Inputs.DatasetAccessArgs
                {
                    Role = "OWNER",
                    UserByEmail = bqowner.Email,
                },
                new Gcp.BigQuery.Inputs.DatasetAccessArgs
                {
                    Role = "READER",
                    Domain = "hashicorp.com",
                },
            },
        });
    
        var dataset = new Gcp.BigQuery.Dataset("dataset", new()
        {
            DatasetId = "private",
            FriendlyName = "test",
            Description = "This dataset is private",
            Location = "EU",
            DefaultTableExpirationMs = 3600000,
            Labels = 
            {
                { "env", "default" },
            },
            Accesses = new[]
            {
                new Gcp.BigQuery.Inputs.DatasetAccessArgs
                {
                    Role = "OWNER",
                    UserByEmail = bqowner.Email,
                },
                new Gcp.BigQuery.Inputs.DatasetAccessArgs
                {
                    Role = "READER",
                    Domain = "hashicorp.com",
                },
                new Gcp.BigQuery.Inputs.DatasetAccessArgs
                {
                    Dataset = new Gcp.BigQuery.Inputs.DatasetAccessDatasetArgs
                    {
                        Dataset = new Gcp.BigQuery.Inputs.DatasetAccessDatasetDatasetArgs
                        {
                            ProjectId = @public.Project,
                            DatasetId = @public.DatasetId,
                        },
                        TargetTypes = new[]
                        {
                            "VIEWS",
                        },
                    },
                },
            },
        });
    
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.gcp.serviceaccount.Account;
    import com.pulumi.gcp.serviceaccount.AccountArgs;
    import com.pulumi.gcp.bigquery.Dataset;
    import com.pulumi.gcp.bigquery.DatasetArgs;
    import com.pulumi.gcp.bigquery.inputs.DatasetAccessArgs;
    import com.pulumi.gcp.bigquery.inputs.DatasetAccessDatasetArgs;
    import com.pulumi.gcp.bigquery.inputs.DatasetAccessDatasetDatasetArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var bqowner = new Account("bqowner", AccountArgs.builder()        
                .accountId("bqowner")
                .build());
    
            var public_ = new Dataset("public", DatasetArgs.builder()        
                .datasetId("public")
                .friendlyName("test")
                .description("This dataset is public")
                .location("EU")
                .defaultTableExpirationMs(3600000)
                .labels(Map.of("env", "default"))
                .accesses(            
                    DatasetAccessArgs.builder()
                        .role("OWNER")
                        .userByEmail(bqowner.email())
                        .build(),
                    DatasetAccessArgs.builder()
                        .role("READER")
                        .domain("hashicorp.com")
                        .build())
                .build());
    
            var dataset = new Dataset("dataset", DatasetArgs.builder()        
                .datasetId("private")
                .friendlyName("test")
                .description("This dataset is private")
                .location("EU")
                .defaultTableExpirationMs(3600000)
                .labels(Map.of("env", "default"))
                .accesses(            
                    DatasetAccessArgs.builder()
                        .role("OWNER")
                        .userByEmail(bqowner.email())
                        .build(),
                    DatasetAccessArgs.builder()
                        .role("READER")
                        .domain("hashicorp.com")
                        .build(),
                    DatasetAccessArgs.builder()
                        .dataset(DatasetAccessDatasetArgs.builder()
                            .dataset(DatasetAccessDatasetDatasetArgs.builder()
                                .projectId(public_.project())
                                .datasetId(public_.datasetId())
                                .build())
                            .targetTypes("VIEWS")
                            .build())
                        .build())
                .build());
    
        }
    }
    
    resources:
      public:
        type: gcp:bigquery:Dataset
        properties:
          datasetId: public
          friendlyName: test
          description: This dataset is public
          location: EU
          defaultTableExpirationMs: 3600000
          labels:
            env: default
          accesses:
            - role: OWNER
              userByEmail: ${bqowner.email}
            - role: READER
              domain: hashicorp.com
      dataset:
        type: gcp:bigquery:Dataset
        properties:
          datasetId: private
          friendlyName: test
          description: This dataset is private
          location: EU
          defaultTableExpirationMs: 3600000
          labels:
            env: default
          accesses:
            - role: OWNER
              userByEmail: ${bqowner.email}
            - role: READER
              domain: hashicorp.com
            - dataset:
                dataset:
                  projectId: ${public.project}
                  datasetId: ${public.datasetId}
                targetTypes:
                  - VIEWS
      bqowner:
        type: gcp:serviceaccount:Account
        properties:
          accountId: bqowner
    

    Bigquery Dataset Authorized Routine

    import * as pulumi from "@pulumi/pulumi";
    import * as gcp from "@pulumi/gcp";
    
    const _public = new gcp.bigquery.Dataset("public", {
        datasetId: "public_dataset",
        description: "This dataset is public",
    });
    const publicRoutine = new gcp.bigquery.Routine("public", {
        datasetId: _public.datasetId,
        routineId: "public_routine",
        routineType: "TABLE_VALUED_FUNCTION",
        language: "SQL",
        definitionBody: "SELECT 1 + value AS value\n",
        arguments: [{
            name: "value",
            argumentKind: "FIXED_TYPE",
            dataType: JSON.stringify({
                typeKind: "INT64",
            }),
        }],
        returnTableType: JSON.stringify({
            columns: [{
                name: "value",
                type: {
                    typeKind: "INT64",
                },
            }],
        }),
    });
    const _private = new gcp.bigquery.Dataset("private", {
        datasetId: "private_dataset",
        description: "This dataset is private",
        accesses: [
            {
                role: "OWNER",
                userByEmail: "my@service-account.com",
            },
            {
                routine: {
                    projectId: publicRoutine.project,
                    datasetId: publicRoutine.datasetId,
                    routineId: publicRoutine.routineId,
                },
            },
        ],
    });
    
    import pulumi
    import json
    import pulumi_gcp as gcp
    
    public = gcp.bigquery.Dataset("public",
        dataset_id="public_dataset",
        description="This dataset is public")
    public_routine = gcp.bigquery.Routine("public",
        dataset_id=public.dataset_id,
        routine_id="public_routine",
        routine_type="TABLE_VALUED_FUNCTION",
        language="SQL",
        definition_body="SELECT 1 + value AS value\n",
        arguments=[gcp.bigquery.RoutineArgumentArgs(
            name="value",
            argument_kind="FIXED_TYPE",
            data_type=json.dumps({
                "typeKind": "INT64",
            }),
        )],
        return_table_type=json.dumps({
            "columns": [{
                "name": "value",
                "type": {
                    "typeKind": "INT64",
                },
            }],
        }))
    private = gcp.bigquery.Dataset("private",
        dataset_id="private_dataset",
        description="This dataset is private",
        accesses=[
            gcp.bigquery.DatasetAccessArgs(
                role="OWNER",
                user_by_email="my@service-account.com",
            ),
            gcp.bigquery.DatasetAccessArgs(
                routine=gcp.bigquery.DatasetAccessRoutineArgs(
                    project_id=public_routine.project,
                    dataset_id=public_routine.dataset_id,
                    routine_id=public_routine.routine_id,
                ),
            ),
        ])
    
    package main
    
    import (
    	"encoding/json"
    
    	"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/bigquery"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		public, err := bigquery.NewDataset(ctx, "public", &bigquery.DatasetArgs{
    			DatasetId:   pulumi.String("public_dataset"),
    			Description: pulumi.String("This dataset is public"),
    		})
    		if err != nil {
    			return err
    		}
    		tmpJSON0, err := json.Marshal(map[string]interface{}{
    			"typeKind": "INT64",
    		})
    		if err != nil {
    			return err
    		}
    		json0 := string(tmpJSON0)
    		tmpJSON1, err := json.Marshal(map[string]interface{}{
    			"columns": []map[string]interface{}{
    				map[string]interface{}{
    					"name": "value",
    					"type": map[string]interface{}{
    						"typeKind": "INT64",
    					},
    				},
    			},
    		})
    		if err != nil {
    			return err
    		}
    		json1 := string(tmpJSON1)
    		publicRoutine, err := bigquery.NewRoutine(ctx, "public", &bigquery.RoutineArgs{
    			DatasetId:      public.DatasetId,
    			RoutineId:      pulumi.String("public_routine"),
    			RoutineType:    pulumi.String("TABLE_VALUED_FUNCTION"),
    			Language:       pulumi.String("SQL"),
    			DefinitionBody: pulumi.String("SELECT 1 + value AS value\n"),
    			Arguments: bigquery.RoutineArgumentArray{
    				&bigquery.RoutineArgumentArgs{
    					Name:         pulumi.String("value"),
    					ArgumentKind: pulumi.String("FIXED_TYPE"),
    					DataType:     pulumi.String(json0),
    				},
    			},
    			ReturnTableType: pulumi.String(json1),
    		})
    		if err != nil {
    			return err
    		}
    		_, err = bigquery.NewDataset(ctx, "private", &bigquery.DatasetArgs{
    			DatasetId:   pulumi.String("private_dataset"),
    			Description: pulumi.String("This dataset is private"),
    			Accesses: bigquery.DatasetAccessTypeArray{
    				&bigquery.DatasetAccessTypeArgs{
    					Role:        pulumi.String("OWNER"),
    					UserByEmail: pulumi.String("my@service-account.com"),
    				},
    				&bigquery.DatasetAccessTypeArgs{
    					Routine: &bigquery.DatasetAccessRoutineArgs{
    						ProjectId: publicRoutine.Project,
    						DatasetId: publicRoutine.DatasetId,
    						RoutineId: publicRoutine.RoutineId,
    					},
    				},
    			},
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using System.Text.Json;
    using Pulumi;
    using Gcp = Pulumi.Gcp;
    
    return await Deployment.RunAsync(() => 
    {
        var @public = new Gcp.BigQuery.Dataset("public", new()
        {
            DatasetId = "public_dataset",
            Description = "This dataset is public",
        });
    
        var publicRoutine = new Gcp.BigQuery.Routine("public", new()
        {
            DatasetId = @public.DatasetId,
            RoutineId = "public_routine",
            RoutineType = "TABLE_VALUED_FUNCTION",
            Language = "SQL",
            DefinitionBody = @"SELECT 1 + value AS value
    ",
            Arguments = new[]
            {
                new Gcp.BigQuery.Inputs.RoutineArgumentArgs
                {
                    Name = "value",
                    ArgumentKind = "FIXED_TYPE",
                    DataType = JsonSerializer.Serialize(new Dictionary<string, object?>
                    {
                        ["typeKind"] = "INT64",
                    }),
                },
            },
            ReturnTableType = JsonSerializer.Serialize(new Dictionary<string, object?>
            {
                ["columns"] = new[]
                {
                    new Dictionary<string, object?>
                    {
                        ["name"] = "value",
                        ["type"] = new Dictionary<string, object?>
                        {
                            ["typeKind"] = "INT64",
                        },
                    },
                },
            }),
        });
    
        var @private = new Gcp.BigQuery.Dataset("private", new()
        {
            DatasetId = "private_dataset",
            Description = "This dataset is private",
            Accesses = new[]
            {
                new Gcp.BigQuery.Inputs.DatasetAccessArgs
                {
                    Role = "OWNER",
                    UserByEmail = "my@service-account.com",
                },
                new Gcp.BigQuery.Inputs.DatasetAccessArgs
                {
                    Routine = new Gcp.BigQuery.Inputs.DatasetAccessRoutineArgs
                    {
                        ProjectId = publicRoutine.Project,
                        DatasetId = publicRoutine.DatasetId,
                        RoutineId = publicRoutine.RoutineId,
                    },
                },
            },
        });
    
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.gcp.bigquery.Dataset;
    import com.pulumi.gcp.bigquery.DatasetArgs;
    import com.pulumi.gcp.bigquery.Routine;
    import com.pulumi.gcp.bigquery.RoutineArgs;
    import com.pulumi.gcp.bigquery.inputs.RoutineArgumentArgs;
    import com.pulumi.gcp.bigquery.inputs.DatasetAccessArgs;
    import com.pulumi.gcp.bigquery.inputs.DatasetAccessRoutineArgs;
    import static com.pulumi.codegen.internal.Serialization.*;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var public_ = new Dataset("public", DatasetArgs.builder()        
                .datasetId("public_dataset")
                .description("This dataset is public")
                .build());
    
            var publicRoutine = new Routine("publicRoutine", RoutineArgs.builder()        
                .datasetId(public_.datasetId())
                .routineId("public_routine")
                .routineType("TABLE_VALUED_FUNCTION")
                .language("SQL")
                .definitionBody("""
    SELECT 1 + value AS value
                """)
                .arguments(RoutineArgumentArgs.builder()
                    .name("value")
                    .argumentKind("FIXED_TYPE")
                    .dataType(serializeJson(
                        jsonObject(
                            jsonProperty("typeKind", "INT64")
                        )))
                    .build())
                .returnTableType(serializeJson(
                    jsonObject(
                        jsonProperty("columns", jsonArray(jsonObject(
                            jsonProperty("name", "value"),
                            jsonProperty("type", jsonObject(
                                jsonProperty("typeKind", "INT64")
                            ))
                        )))
                    )))
                .build());
    
            var private_ = new Dataset("private", DatasetArgs.builder()        
                .datasetId("private_dataset")
                .description("This dataset is private")
                .accesses(            
                    DatasetAccessArgs.builder()
                        .role("OWNER")
                        .userByEmail("my@service-account.com")
                        .build(),
                    DatasetAccessArgs.builder()
                        .routine(DatasetAccessRoutineArgs.builder()
                            .projectId(publicRoutine.project())
                            .datasetId(publicRoutine.datasetId())
                            .routineId(publicRoutine.routineId())
                            .build())
                        .build())
                .build());
    
        }
    }
    
    resources:
      public:
        type: gcp:bigquery:Dataset
        properties:
          datasetId: public_dataset
          description: This dataset is public
      publicRoutine:
        type: gcp:bigquery:Routine
        name: public
        properties:
          datasetId: ${public.datasetId}
          routineId: public_routine
          routineType: TABLE_VALUED_FUNCTION
          language: SQL
          definitionBody: |
            SELECT 1 + value AS value
          arguments:
            - name: value
              argumentKind: FIXED_TYPE
              dataType:
                fn::toJSON:
                  typeKind: INT64
          returnTableType:
            fn::toJSON:
              columns:
                - name: value
                  type:
                    typeKind: INT64
      private:
        type: gcp:bigquery:Dataset
        properties:
          datasetId: private_dataset
          description: This dataset is private
          accesses:
            - role: OWNER
              userByEmail: my@service-account.com
            - routine:
                projectId: ${publicRoutine.project}
                datasetId: ${publicRoutine.datasetId}
                routineId: ${publicRoutine.routineId}
    

    Bigquery Dataset External Reference Aws Docs

    import * as pulumi from "@pulumi/pulumi";
    import * as gcp from "@pulumi/gcp";
    
    const dataset = new gcp.bigquery.Dataset("dataset", {
        datasetId: "example_dataset",
        friendlyName: "test",
        description: "This is a test description",
        location: "aws-us-east-1",
        externalDatasetReference: {
            externalSource: "aws-glue://arn:aws:glue:us-east-1:999999999999:database/database",
            connection: "projects/project/locations/aws-us-east-1/connections/connection",
        },
    });
    
    import pulumi
    import pulumi_gcp as gcp
    
    dataset = gcp.bigquery.Dataset("dataset",
        dataset_id="example_dataset",
        friendly_name="test",
        description="This is a test description",
        location="aws-us-east-1",
        external_dataset_reference=gcp.bigquery.DatasetExternalDatasetReferenceArgs(
            external_source="aws-glue://arn:aws:glue:us-east-1:999999999999:database/database",
            connection="projects/project/locations/aws-us-east-1/connections/connection",
        ))
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/bigquery"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		_, err := bigquery.NewDataset(ctx, "dataset", &bigquery.DatasetArgs{
    			DatasetId:    pulumi.String("example_dataset"),
    			FriendlyName: pulumi.String("test"),
    			Description:  pulumi.String("This is a test description"),
    			Location:     pulumi.String("aws-us-east-1"),
    			ExternalDatasetReference: &bigquery.DatasetExternalDatasetReferenceArgs{
    				ExternalSource: pulumi.String("aws-glue://arn:aws:glue:us-east-1:999999999999:database/database"),
    				Connection:     pulumi.String("projects/project/locations/aws-us-east-1/connections/connection"),
    			},
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Gcp = Pulumi.Gcp;
    
    return await Deployment.RunAsync(() => 
    {
        var dataset = new Gcp.BigQuery.Dataset("dataset", new()
        {
            DatasetId = "example_dataset",
            FriendlyName = "test",
            Description = "This is a test description",
            Location = "aws-us-east-1",
            ExternalDatasetReference = new Gcp.BigQuery.Inputs.DatasetExternalDatasetReferenceArgs
            {
                ExternalSource = "aws-glue://arn:aws:glue:us-east-1:999999999999:database/database",
                Connection = "projects/project/locations/aws-us-east-1/connections/connection",
            },
        });
    
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.gcp.bigquery.Dataset;
    import com.pulumi.gcp.bigquery.DatasetArgs;
    import com.pulumi.gcp.bigquery.inputs.DatasetExternalDatasetReferenceArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var dataset = new Dataset("dataset", DatasetArgs.builder()        
                .datasetId("example_dataset")
                .friendlyName("test")
                .description("This is a test description")
                .location("aws-us-east-1")
                .externalDatasetReference(DatasetExternalDatasetReferenceArgs.builder()
                    .externalSource("aws-glue://arn:aws:glue:us-east-1:999999999999:database/database")
                    .connection("projects/project/locations/aws-us-east-1/connections/connection")
                    .build())
                .build());
    
        }
    }
    
    resources:
      dataset:
        type: gcp:bigquery:Dataset
        properties:
          datasetId: example_dataset
          friendlyName: test
          description: This is a test description
          location: aws-us-east-1
          externalDatasetReference:
            externalSource: aws-glue://arn:aws:glue:us-east-1:999999999999:database/database
            connection: projects/project/locations/aws-us-east-1/connections/connection
    

    Create Dataset Resource

    Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.

    Constructor syntax

    new Dataset(name: string, args: DatasetArgs, opts?: CustomResourceOptions);
    @overload
    def Dataset(resource_name: str,
                args: DatasetArgs,
                opts: Optional[ResourceOptions] = None)
    
    @overload
    def Dataset(resource_name: str,
                opts: Optional[ResourceOptions] = None,
                dataset_id: Optional[str] = None,
                external_dataset_reference: Optional[DatasetExternalDatasetReferenceArgs] = None,
                friendly_name: Optional[str] = None,
                default_encryption_configuration: Optional[DatasetDefaultEncryptionConfigurationArgs] = None,
                default_partition_expiration_ms: Optional[int] = None,
                default_table_expiration_ms: Optional[int] = None,
                delete_contents_on_destroy: Optional[bool] = None,
                default_collation: Optional[str] = None,
                accesses: Optional[Sequence[DatasetAccessArgs]] = None,
                description: Optional[str] = None,
                is_case_insensitive: Optional[bool] = None,
                labels: Optional[Mapping[str, str]] = None,
                location: Optional[str] = None,
                max_time_travel_hours: Optional[str] = None,
                project: Optional[str] = None,
                storage_billing_model: Optional[str] = None)
    func NewDataset(ctx *Context, name string, args DatasetArgs, opts ...ResourceOption) (*Dataset, error)
    public Dataset(string name, DatasetArgs args, CustomResourceOptions? opts = null)
    public Dataset(String name, DatasetArgs args)
    public Dataset(String name, DatasetArgs args, CustomResourceOptions options)
    
    type: gcp:bigquery:Dataset
    properties: # The arguments to resource properties.
    options: # Bag of options to control resource's behavior.
    
    

    Parameters

    TypeScript

    name string
    The unique name of the resource.
    args DatasetArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.

    Python

    resource_name str
    The unique name of the resource.
    args DatasetArgs
    The arguments to resource properties.
    opts ResourceOptions
    Bag of options to control resource's behavior.

    Go

    ctx Context
    Context object for the current deployment.
    name string
    The unique name of the resource.
    args DatasetArgs
    The arguments to resource properties.
    opts ResourceOption
    Bag of options to control resource's behavior.

    C#

    name string
    The unique name of the resource.
    args DatasetArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.

    Java

    name String
    The unique name of the resource.
    args DatasetArgs
    The arguments to resource properties.
    options CustomResourceOptions
    Bag of options to control resource's behavior.

    Example

    The following reference example uses placeholder values for all input properties.

    var datasetResource = new Gcp.BigQuery.Dataset("datasetResource", new()
    {
        DatasetId = "string",
        ExternalDatasetReference = new Gcp.BigQuery.Inputs.DatasetExternalDatasetReferenceArgs
        {
            Connection = "string",
            ExternalSource = "string",
        },
        FriendlyName = "string",
        DefaultEncryptionConfiguration = new Gcp.BigQuery.Inputs.DatasetDefaultEncryptionConfigurationArgs
        {
            KmsKeyName = "string",
        },
        DefaultPartitionExpirationMs = 0,
        DefaultTableExpirationMs = 0,
        DeleteContentsOnDestroy = false,
        DefaultCollation = "string",
        Accesses = new[]
        {
            new Gcp.BigQuery.Inputs.DatasetAccessArgs
            {
                Dataset = new Gcp.BigQuery.Inputs.DatasetAccessDatasetArgs
                {
                    Dataset = new Gcp.BigQuery.Inputs.DatasetAccessDatasetDatasetArgs
                    {
                        DatasetId = "string",
                        ProjectId = "string",
                    },
                    TargetTypes = new[]
                    {
                        "string",
                    },
                },
                Domain = "string",
                GroupByEmail = "string",
                IamMember = "string",
                Role = "string",
                Routine = new Gcp.BigQuery.Inputs.DatasetAccessRoutineArgs
                {
                    DatasetId = "string",
                    ProjectId = "string",
                    RoutineId = "string",
                },
                SpecialGroup = "string",
                UserByEmail = "string",
                View = new Gcp.BigQuery.Inputs.DatasetAccessViewArgs
                {
                    DatasetId = "string",
                    ProjectId = "string",
                    TableId = "string",
                },
            },
        },
        Description = "string",
        IsCaseInsensitive = false,
        Labels = 
        {
            { "string", "string" },
        },
        Location = "string",
        MaxTimeTravelHours = "string",
        Project = "string",
        StorageBillingModel = "string",
    });
    
    example, err := bigquery.NewDataset(ctx, "datasetResource", &bigquery.DatasetArgs{
    	DatasetId: pulumi.String("string"),
    	ExternalDatasetReference: &bigquery.DatasetExternalDatasetReferenceArgs{
    		Connection:     pulumi.String("string"),
    		ExternalSource: pulumi.String("string"),
    	},
    	FriendlyName: pulumi.String("string"),
    	DefaultEncryptionConfiguration: &bigquery.DatasetDefaultEncryptionConfigurationArgs{
    		KmsKeyName: pulumi.String("string"),
    	},
    	DefaultPartitionExpirationMs: pulumi.Int(0),
    	DefaultTableExpirationMs:     pulumi.Int(0),
    	DeleteContentsOnDestroy:      pulumi.Bool(false),
    	DefaultCollation:             pulumi.String("string"),
    	Accesses: bigquery.DatasetAccessTypeArray{
    		&bigquery.DatasetAccessTypeArgs{
    			Dataset: &bigquery.DatasetAccessDatasetArgs{
    				Dataset: &bigquery.DatasetAccessDatasetDatasetArgs{
    					DatasetId: pulumi.String("string"),
    					ProjectId: pulumi.String("string"),
    				},
    				TargetTypes: pulumi.StringArray{
    					pulumi.String("string"),
    				},
    			},
    			Domain:       pulumi.String("string"),
    			GroupByEmail: pulumi.String("string"),
    			IamMember:    pulumi.String("string"),
    			Role:         pulumi.String("string"),
    			Routine: &bigquery.DatasetAccessRoutineArgs{
    				DatasetId: pulumi.String("string"),
    				ProjectId: pulumi.String("string"),
    				RoutineId: pulumi.String("string"),
    			},
    			SpecialGroup: pulumi.String("string"),
    			UserByEmail:  pulumi.String("string"),
    			View: &bigquery.DatasetAccessViewArgs{
    				DatasetId: pulumi.String("string"),
    				ProjectId: pulumi.String("string"),
    				TableId:   pulumi.String("string"),
    			},
    		},
    	},
    	Description:       pulumi.String("string"),
    	IsCaseInsensitive: pulumi.Bool(false),
    	Labels: pulumi.StringMap{
    		"string": pulumi.String("string"),
    	},
    	Location:            pulumi.String("string"),
    	MaxTimeTravelHours:  pulumi.String("string"),
    	Project:             pulumi.String("string"),
    	StorageBillingModel: pulumi.String("string"),
    })
    
    var datasetResource = new Dataset("datasetResource", DatasetArgs.builder()        
        .datasetId("string")
        .externalDatasetReference(DatasetExternalDatasetReferenceArgs.builder()
            .connection("string")
            .externalSource("string")
            .build())
        .friendlyName("string")
        .defaultEncryptionConfiguration(DatasetDefaultEncryptionConfigurationArgs.builder()
            .kmsKeyName("string")
            .build())
        .defaultPartitionExpirationMs(0)
        .defaultTableExpirationMs(0)
        .deleteContentsOnDestroy(false)
        .defaultCollation("string")
        .accesses(DatasetAccessArgs.builder()
            .dataset(DatasetAccessDatasetArgs.builder()
                .dataset(DatasetAccessDatasetDatasetArgs.builder()
                    .datasetId("string")
                    .projectId("string")
                    .build())
                .targetTypes("string")
                .build())
            .domain("string")
            .groupByEmail("string")
            .iamMember("string")
            .role("string")
            .routine(DatasetAccessRoutineArgs.builder()
                .datasetId("string")
                .projectId("string")
                .routineId("string")
                .build())
            .specialGroup("string")
            .userByEmail("string")
            .view(DatasetAccessViewArgs.builder()
                .datasetId("string")
                .projectId("string")
                .tableId("string")
                .build())
            .build())
        .description("string")
        .isCaseInsensitive(false)
        .labels(Map.of("string", "string"))
        .location("string")
        .maxTimeTravelHours("string")
        .project("string")
        .storageBillingModel("string")
        .build());
    
    dataset_resource = gcp.bigquery.Dataset("datasetResource",
        dataset_id="string",
        external_dataset_reference=gcp.bigquery.DatasetExternalDatasetReferenceArgs(
            connection="string",
            external_source="string",
        ),
        friendly_name="string",
        default_encryption_configuration=gcp.bigquery.DatasetDefaultEncryptionConfigurationArgs(
            kms_key_name="string",
        ),
        default_partition_expiration_ms=0,
        default_table_expiration_ms=0,
        delete_contents_on_destroy=False,
        default_collation="string",
        accesses=[gcp.bigquery.DatasetAccessArgs(
            dataset=gcp.bigquery.DatasetAccessDatasetArgs(
                dataset=gcp.bigquery.DatasetAccessDatasetDatasetArgs(
                    dataset_id="string",
                    project_id="string",
                ),
                target_types=["string"],
            ),
            domain="string",
            group_by_email="string",
            iam_member="string",
            role="string",
            routine=gcp.bigquery.DatasetAccessRoutineArgs(
                dataset_id="string",
                project_id="string",
                routine_id="string",
            ),
            special_group="string",
            user_by_email="string",
            view=gcp.bigquery.DatasetAccessViewArgs(
                dataset_id="string",
                project_id="string",
                table_id="string",
            ),
        )],
        description="string",
        is_case_insensitive=False,
        labels={
            "string": "string",
        },
        location="string",
        max_time_travel_hours="string",
        project="string",
        storage_billing_model="string")
    
    const datasetResource = new gcp.bigquery.Dataset("datasetResource", {
        datasetId: "string",
        externalDatasetReference: {
            connection: "string",
            externalSource: "string",
        },
        friendlyName: "string",
        defaultEncryptionConfiguration: {
            kmsKeyName: "string",
        },
        defaultPartitionExpirationMs: 0,
        defaultTableExpirationMs: 0,
        deleteContentsOnDestroy: false,
        defaultCollation: "string",
        accesses: [{
            dataset: {
                dataset: {
                    datasetId: "string",
                    projectId: "string",
                },
                targetTypes: ["string"],
            },
            domain: "string",
            groupByEmail: "string",
            iamMember: "string",
            role: "string",
            routine: {
                datasetId: "string",
                projectId: "string",
                routineId: "string",
            },
            specialGroup: "string",
            userByEmail: "string",
            view: {
                datasetId: "string",
                projectId: "string",
                tableId: "string",
            },
        }],
        description: "string",
        isCaseInsensitive: false,
        labels: {
            string: "string",
        },
        location: "string",
        maxTimeTravelHours: "string",
        project: "string",
        storageBillingModel: "string",
    });
    
    type: gcp:bigquery:Dataset
    properties:
        accesses:
            - dataset:
                dataset:
                    datasetId: string
                    projectId: string
                targetTypes:
                    - string
              domain: string
              groupByEmail: string
              iamMember: string
              role: string
              routine:
                datasetId: string
                projectId: string
                routineId: string
              specialGroup: string
              userByEmail: string
              view:
                datasetId: string
                projectId: string
                tableId: string
        datasetId: string
        defaultCollation: string
        defaultEncryptionConfiguration:
            kmsKeyName: string
        defaultPartitionExpirationMs: 0
        defaultTableExpirationMs: 0
        deleteContentsOnDestroy: false
        description: string
        externalDatasetReference:
            connection: string
            externalSource: string
        friendlyName: string
        isCaseInsensitive: false
        labels:
            string: string
        location: string
        maxTimeTravelHours: string
        project: string
        storageBillingModel: string
    

    Dataset Resource Properties

    To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

    Inputs

    The Dataset resource accepts the following input properties:

    DatasetId string
    A unique ID for this dataset, without the project name. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters.


    Accesses List<DatasetAccess>
    An array of objects that define dataset access for one or more entities. Structure is documented below.
    DefaultCollation string
    Defines the default collation specification of future tables created in the dataset. If a table is created in this dataset without a table-level default collation, it inherits the dataset's default collation, which is applied to string fields that do not have an explicit collation specified. A change to this field affects only tables created afterwards and does not alter existing tables. The following values are supported:

    • 'und:ci': undetermined locale, case insensitive.
    • '': empty string. Defaults to case-sensitive behavior.
    DefaultEncryptionConfiguration DatasetDefaultEncryptionConfiguration
    The default encryption key for all tables in the dataset. Once this property is set, all newly-created partitioned tables in the dataset will have encryption key set to this value, unless table creation request (or query) overrides the key. Structure is documented below.
    DefaultPartitionExpirationMs int

    The default partition expiration for all partitioned tables in the dataset, in milliseconds.

    Once this property is set, all newly-created partitioned tables in the dataset will have an expirationMs property in the timePartitioning settings set to this value, and changing the value will only affect new tables, not existing ones. The storage in a partition will have an expiration time of its partition time plus this value. Setting this property overrides the use of defaultTableExpirationMs for partitioned tables: only one of defaultTableExpirationMs and defaultPartitionExpirationMs will be used for any new partitioned table. If you provide an explicit timePartitioning.expirationMs when creating or updating a partitioned table, that value takes precedence over the default partition expiration time indicated by this property.

    DefaultTableExpirationMs int

    The default lifetime of all tables in the dataset, in milliseconds. The minimum value is 3600000 milliseconds (one hour).

    Once this property is set, all newly-created tables in the dataset will have an expirationTime property set to the creation time plus the value in this property, and changing the value will only affect new tables, not existing ones. When the expirationTime for a given table is reached, that table will be deleted automatically. If a table's expirationTime is modified or removed before the table expires, or if you provide an explicit expirationTime when creating a table, that value takes precedence over the default expiration time indicated by this property.

    DeleteContentsOnDestroy bool
    If set to true, delete all the tables in the dataset when destroying the resource; otherwise, destroying the resource will fail if tables are present.
    Description string
    A user-friendly description of the dataset
    ExternalDatasetReference DatasetExternalDatasetReference
    Information about the external metadata storage where the dataset is defined. Structure is documented below.
    FriendlyName string
    A descriptive name for the dataset
    IsCaseInsensitive bool
    TRUE if the dataset and its table names are case-insensitive, otherwise FALSE. By default, this is FALSE, which means the dataset and its table names are case-sensitive. This field does not affect routine references.
    Labels Dictionary<string, string>

    The labels associated with this dataset. You can use these to organize and group your datasets.

    Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.

    Location string

    The geographic location where the dataset should reside. See official docs.

    There are two types of locations, regional or multi-regional. A regional location is a specific geographic place, such as Tokyo, and a multi-regional location is a large geographic area, such as the United States, that contains at least two geographic places.

    The default value is multi-regional location US. Changing this forces a new resource to be created.

    MaxTimeTravelHours string
    Defines the time travel window in hours. The value can be from 48 to 168 hours (2 to 7 days).
    Project string
    The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
    StorageBillingModel string
    Specifies the storage billing model for the dataset. Set this flag value to LOGICAL to use logical bytes for storage billing, or to PHYSICAL to use physical bytes instead. LOGICAL is the default if this flag isn't specified.
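
    As a minimal TypeScript sketch of the CMEK and expiration inputs described above (the project, key ring, and key names are placeholders, and the BigQuery service account typically also needs permission to use the key):

    import * as gcp from "@pulumi/gcp";

    // Placeholder project, key ring, and key names.
    const kmsKeyName = "projects/my-project/locations/us/keyRings/my-ring/cryptoKeys/my-key";

    const secureDataset = new gcp.bigquery.Dataset("secure_dataset", {
        datasetId: "secure_dataset",
        location: "US",
        // New tables in the dataset are encrypted with this key unless the
        // table creation request (or query) overrides it.
        defaultEncryptionConfiguration: {
            kmsKeyName: kmsKeyName,
        },
        // Partitions in new partitioned tables expire 30 days after their partition time;
        // once set, this takes precedence over defaultTableExpirationMs for partitioned tables.
        defaultPartitionExpirationMs: 30 * 24 * 60 * 60 * 1000,
        // New non-partitioned tables are deleted 90 days after creation.
        defaultTableExpirationMs: 90 * 24 * 60 * 60 * 1000,
    });
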
    DatasetId string
    A unique ID for this dataset, without the project name. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters.


    Accesses []DatasetAccessTypeArgs
    An array of objects that define dataset access for one or more entities. Structure is documented below.
    DefaultCollation string
    Defines the default collation specification of future tables created in the dataset. If a table is created in this dataset without table-level default collation, then the table inherits the dataset default collation, which is applied to the string fields that do not have explicit collation specified. A change to this field affects only tables created afterwards, and does not alter the existing tables. The following values are supported:

    • 'und:ci': undetermined locale, case insensitive.
    • '': empty string. Defaults to case-sensitive behavior.
    DefaultEncryptionConfiguration DatasetDefaultEncryptionConfigurationArgs
    The default encryption key for all tables in the dataset. Once this property is set, all newly-created partitioned tables in the dataset will have encryption key set to this value, unless table creation request (or query) overrides the key. Structure is documented below.
    DefaultPartitionExpirationMs int

    The default partition expiration for all partitioned tables in the dataset, in milliseconds.

    Once this property is set, all newly-created partitioned tables in the dataset will have an expirationMs property in the timePartitioning settings set to this value, and changing the value will only affect new tables, not existing ones. The storage in a partition will have an expiration time of its partition time plus this value. Setting this property overrides the use of defaultTableExpirationMs for partitioned tables: only one of defaultTableExpirationMs and defaultPartitionExpirationMs will be used for any new partitioned table. If you provide an explicit timePartitioning.expirationMs when creating or updating a partitioned table, that value takes precedence over the default partition expiration time indicated by this property.

    DefaultTableExpirationMs int

    The default lifetime of all tables in the dataset, in milliseconds. The minimum value is 3600000 milliseconds (one hour).

    Once this property is set, all newly-created tables in the dataset will have an expirationTime property set to the creation time plus the value in this property, and changing the value will only affect new tables, not existing ones. When the expirationTime for a given table is reached, that table will be deleted automatically. If a table's expirationTime is modified or removed before the table expires, or if you provide an explicit expirationTime when creating a table, that value takes precedence over the default expiration time indicated by this property.

    DeleteContentsOnDestroy bool
    If set to true, delete all the tables in the dataset when destroying the resource; otherwise, destroying the resource will fail if tables are present.
    Description string
    A user-friendly description of the dataset
    ExternalDatasetReference DatasetExternalDatasetReferenceArgs
    Information about the external metadata storage where the dataset is defined. Structure is documented below.
    FriendlyName string
    A descriptive name for the dataset
    IsCaseInsensitive bool
    TRUE if the dataset and its table names are case-insensitive, otherwise FALSE. By default, this is FALSE, which means the dataset and its table names are case-sensitive. This field does not affect routine references.
    Labels map[string]string

    The labels associated with this dataset. You can use these to organize and group your datasets.

    Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.

    Location string

    The geographic location where the dataset should reside. See official docs.

    There are two types of locations, regional or multi-regional. A regional location is a specific geographic place, such as Tokyo, and a multi-regional location is a large geographic area, such as the United States, that contains at least two geographic places.

    The default value is multi-regional location US. Changing this forces a new resource to be created.

    MaxTimeTravelHours string
    Defines the time travel window in hours. The value can be from 48 to 168 hours (2 to 7 days).
    Project string
    The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
    StorageBillingModel string
    Specifies the storage billing model for the dataset. Set this flag value to LOGICAL to use logical bytes for storage billing, or to PHYSICAL to use physical bytes instead. LOGICAL is the default if this flag isn't specified.
    datasetId String
    A unique ID for this dataset, without the project name. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters.


    accesses List<DatasetAccess>
    An array of objects that define dataset access for one or more entities. Structure is documented below.
    defaultCollation String
    Defines the default collation specification of future tables created in the dataset. If a table is created in this dataset without table-level default collation, then the table inherits the dataset default collation, which is applied to the string fields that do not have explicit collation specified. A change to this field affects only tables created afterwards, and does not alter the existing tables. The following values are supported:

    • 'und:ci': undetermined locale, case insensitive.
    • '': empty string. Defaults to case-sensitive behavior.
    defaultEncryptionConfiguration DatasetDefaultEncryptionConfiguration
    The default encryption key for all tables in the dataset. Once this property is set, all newly-created partitioned tables in the dataset will have encryption key set to this value, unless table creation request (or query) overrides the key. Structure is documented below.
    defaultPartitionExpirationMs Integer

    The default partition expiration for all partitioned tables in the dataset, in milliseconds.

    Once this property is set, all newly-created partitioned tables in the dataset will have an expirationMs property in the timePartitioning settings set to this value, and changing the value will only affect new tables, not existing ones. The storage in a partition will have an expiration time of its partition time plus this value. Setting this property overrides the use of defaultTableExpirationMs for partitioned tables: only one of defaultTableExpirationMs and defaultPartitionExpirationMs will be used for any new partitioned table. If you provide an explicit timePartitioning.expirationMs when creating or updating a partitioned table, that value takes precedence over the default partition expiration time indicated by this property.

    defaultTableExpirationMs Integer

    The default lifetime of all tables in the dataset, in milliseconds. The minimum value is 3600000 milliseconds (one hour).

    Once this property is set, all newly-created tables in the dataset will have an expirationTime property set to the creation time plus the value in this property, and changing the value will only affect new tables, not existing ones. When the expirationTime for a given table is reached, that table will be deleted automatically. If a table's expirationTime is modified or removed before the table expires, or if you provide an explicit expirationTime when creating a table, that value takes precedence over the default expiration time indicated by this property.

    deleteContentsOnDestroy Boolean
    If set to true, delete all the tables in the dataset when destroying the resource; otherwise, destroying the resource will fail if tables are present.
    description String
    A user-friendly description of the dataset
    externalDatasetReference DatasetExternalDatasetReference
    Information about the external metadata storage where the dataset is defined. Structure is documented below.
    friendlyName String
    A descriptive name for the dataset
    isCaseInsensitive Boolean
    TRUE if the dataset and its table names are case-insensitive, otherwise FALSE. By default, this is FALSE, which means the dataset and its table names are case-sensitive. This field does not affect routine references.
    labels Map<String,String>

    The labels associated with this dataset. You can use these to organize and group your datasets.

    Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.

    location String

    The geographic location where the dataset should reside. See official docs.

    There are two types of locations, regional or multi-regional. A regional location is a specific geographic place, such as Tokyo, and a multi-regional location is a large geographic area, such as the United States, that contains at least two geographic places.

    The default value is multi-regional location US. Changing this forces a new resource to be created.

    maxTimeTravelHours String
    Defines the time travel window in hours. The value can be from 48 to 168 hours (2 to 7 days).
    project String
    The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
    storageBillingModel String
    Specifies the storage billing model for the dataset. Set this flag value to LOGICAL to use logical bytes for storage billing, or to PHYSICAL to use physical bytes instead. LOGICAL is the default if this flag isn't specified.
    datasetId string
    A unique ID for this dataset, without the project name. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters.


    accesses DatasetAccess[]
    An array of objects that define dataset access for one or more entities. Structure is documented below.
    defaultCollation string
    Defines the default collation specification of future tables created in the dataset. If a table is created in this dataset without table-level default collation, then the table inherits the dataset default collation, which is applied to the string fields that do not have explicit collation specified. A change to this field affects only tables created afterwards, and does not alter the existing tables. The following values are supported:

    • 'und:ci': undetermined locale, case insensitive.
    • '': empty string. Defaults to case-sensitive behavior.
    defaultEncryptionConfiguration DatasetDefaultEncryptionConfiguration
    The default encryption key for all tables in the dataset. Once this property is set, all newly-created partitioned tables in the dataset will have encryption key set to this value, unless table creation request (or query) overrides the key. Structure is documented below.
    defaultPartitionExpirationMs number

    The default partition expiration for all partitioned tables in the dataset, in milliseconds.

    Once this property is set, all newly-created partitioned tables in the dataset will have an expirationMs property in the timePartitioning settings set to this value, and changing the value will only affect new tables, not existing ones. The storage in a partition will have an expiration time of its partition time plus this value. Setting this property overrides the use of defaultTableExpirationMs for partitioned tables: only one of defaultTableExpirationMs and defaultPartitionExpirationMs will be used for any new partitioned table. If you provide an explicit timePartitioning.expirationMs when creating or updating a partitioned table, that value takes precedence over the default partition expiration time indicated by this property.

    defaultTableExpirationMs number

    The default lifetime of all tables in the dataset, in milliseconds. The minimum value is 3600000 milliseconds (one hour).

    Once this property is set, all newly-created tables in the dataset will have an expirationTime property set to the creation time plus the value in this property, and changing the value will only affect new tables, not existing ones. When the expirationTime for a given table is reached, that table will be deleted automatically. If a table's expirationTime is modified or removed before the table expires, or if you provide an explicit expirationTime when creating a table, that value takes precedence over the default expiration time indicated by this property.

    deleteContentsOnDestroy boolean
    If set to true, delete all the tables in the dataset when destroying the resource; otherwise, destroying the resource will fail if tables are present.
    description string
    A user-friendly description of the dataset
    externalDatasetReference DatasetExternalDatasetReference
    Information about the external metadata storage where the dataset is defined. Structure is documented below.
    friendlyName string
    A descriptive name for the dataset
    isCaseInsensitive boolean
    TRUE if the dataset and its table names are case-insensitive, otherwise FALSE. By default, this is FALSE, which means the dataset and its table names are case-sensitive. This field does not affect routine references.
    labels {[key: string]: string}

    The labels associated with this dataset. You can use these to organize and group your datasets.

    Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.

    location string

    The geographic location where the dataset should reside. See official docs.

    There are two types of locations, regional or multi-regional. A regional location is a specific geographic place, such as Tokyo, and a multi-regional location is a large geographic area, such as the United States, that contains at least two geographic places.

    The default value is multi-regional location US. Changing this forces a new resource to be created.

    maxTimeTravelHours string
    Defines the time travel window in hours. The value can be from 48 to 168 hours (2 to 7 days).
    project string
    The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
    storageBillingModel string
    Specifies the storage billing model for the dataset. Set this flag value to LOGICAL to use logical bytes for storage billing, or to PHYSICAL to use physical bytes instead. LOGICAL is the default if this flag isn't specified.
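
    The accesses input supports entity types beyond users and domains. The TypeScript sketch below is illustrative only (project, dataset, and service account names are placeholders); it grants reader access to an IAM member and authorizes a view from another dataset, and view entries are given without a role:

    import * as gcp from "@pulumi/gcp";

    const reporting = new gcp.bigquery.Dataset("reporting", {
        datasetId: "reporting",
        location: "EU",
        accesses: [
            {
                role: "OWNER",
                specialGroup: "projectOwners",
            },
            {
                role: "READER",
                // Placeholder service account.
                iamMember: "serviceAccount:etl@my-project.iam.gserviceaccount.com",
            },
            {
                // Authorized view: access is defined by the view itself, so no role is set.
                view: {
                    projectId: "my-project",
                    datasetId: "shared_views",
                    tableId: "sales_summary",
                },
            },
        ],
    });
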
    dataset_id str
    A unique ID for this dataset, without the project name. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters.


    accesses Sequence[DatasetAccessArgs]
    An array of objects that define dataset access for one or more entities. Structure is documented below.
    default_collation str
    Defines the default collation specification of future tables created in the dataset. If a table is created in this dataset without table-level default collation, then the table inherits the dataset default collation, which is applied to the string fields that do not have explicit collation specified. A change to this field affects only tables created afterwards, and does not alter the existing tables. The following values are supported:

    • 'und:ci': undetermined locale, case insensitive.
    • '': empty string. Defaults to case-sensitive behavior.
    default_encryption_configuration DatasetDefaultEncryptionConfigurationArgs
    The default encryption key for all tables in the dataset. Once this property is set, all newly-created partitioned tables in the dataset will have encryption key set to this value, unless table creation request (or query) overrides the key. Structure is documented below.
    default_partition_expiration_ms int

    The default partition expiration for all partitioned tables in the dataset, in milliseconds.

    Once this property is set, all newly-created partitioned tables in the dataset will have an expirationMs property in the timePartitioning settings set to this value, and changing the value will only affect new tables, not existing ones. The storage in a partition will have an expiration time of its partition time plus this value. Setting this property overrides the use of defaultTableExpirationMs for partitioned tables: only one of defaultTableExpirationMs and defaultPartitionExpirationMs will be used for any new partitioned table. If you provide an explicit timePartitioning.expirationMs when creating or updating a partitioned table, that value takes precedence over the default partition expiration time indicated by this property.

    default_table_expiration_ms int

    The default lifetime of all tables in the dataset, in milliseconds. The minimum value is 3600000 milliseconds (one hour).

    Once this property is set, all newly-created tables in the dataset will have an expirationTime property set to the creation time plus the value in this property, and changing the value will only affect new tables, not existing ones. When the expirationTime for a given table is reached, that table will be deleted automatically. If a table's expirationTime is modified or removed before the table expires, or if you provide an explicit expirationTime when creating a table, that value takes precedence over the default expiration time indicated by this property.

    delete_contents_on_destroy bool
    If set to true, delete all the tables in the dataset when destroying the resource; otherwise, destroying the resource will fail if tables are present.
    description str
    A user-friendly description of the dataset
    external_dataset_reference DatasetExternalDatasetReferenceArgs
    Information about the external metadata storage where the dataset is defined. Structure is documented below.
    friendly_name str
    A descriptive name for the dataset
    is_case_insensitive bool
    TRUE if the dataset and its table names are case-insensitive, otherwise FALSE. By default, this is FALSE, which means the dataset and its table names are case-sensitive. This field does not affect routine references.
    labels Mapping[str, str]

    The labels associated with this dataset. You can use these to organize and group your datasets.

    Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.

    location str

    The geographic location where the dataset should reside. See official docs.

    There are two types of locations, regional or multi-regional. A regional location is a specific geographic place, such as Tokyo, and a multi-regional location is a large geographic area, such as the United States, that contains at least two geographic places.

    The default value is multi-regional location US. Changing this forces a new resource to be created.

    max_time_travel_hours str
    Defines the time travel window in hours. The value can be from 48 to 168 hours (2 to 7 days).
    project str
    The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
    storage_billing_model str
    Specifies the storage billing model for the dataset. Set this flag value to LOGICAL to use logical bytes for storage billing, or to PHYSICAL to use physical bytes instead. LOGICAL is the default if this flag isn't specified.
    datasetId String
    A unique ID for this dataset, without the project name. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters.


    accesses List<Property Map>
    An array of objects that define dataset access for one or more entities. Structure is documented below.
    defaultCollation String
    Defines the default collation specification of future tables created in the dataset. If a table is created in this dataset without table-level default collation, then the table inherits the dataset default collation, which is applied to the string fields that do not have explicit collation specified. A change to this field affects only tables created afterwards, and does not alter the existing tables. The following values are supported:

    • 'und:ci': undetermined locale, case insensitive.
    • '': empty string. Defaults to case-sensitive behavior.
    defaultEncryptionConfiguration Property Map
    The default encryption key for all tables in the dataset. Once this property is set, all newly-created partitioned tables in the dataset will have encryption key set to this value, unless table creation request (or query) overrides the key. Structure is documented below.
    defaultPartitionExpirationMs Number

    The default partition expiration for all partitioned tables in the dataset, in milliseconds.

    Once this property is set, all newly-created partitioned tables in the dataset will have an expirationMs property in the timePartitioning settings set to this value, and changing the value will only affect new tables, not existing ones. The storage in a partition will have an expiration time of its partition time plus this value. Setting this property overrides the use of defaultTableExpirationMs for partitioned tables: only one of defaultTableExpirationMs and defaultPartitionExpirationMs will be used for any new partitioned table. If you provide an explicit timePartitioning.expirationMs when creating or updating a partitioned table, that value takes precedence over the default partition expiration time indicated by this property.

    defaultTableExpirationMs Number

    The default lifetime of all tables in the dataset, in milliseconds. The minimum value is 3600000 milliseconds (one hour).

    Once this property is set, all newly-created tables in the dataset will have an expirationTime property set to the creation time plus the value in this property, and changing the value will only affect new tables, not existing ones. When the expirationTime for a given table is reached, that table will be deleted automatically. If a table's expirationTime is modified or removed before the table expires, or if you provide an explicit expirationTime when creating a table, that value takes precedence over the default expiration time indicated by this property.

    deleteContentsOnDestroy Boolean
    If set to true, delete all the tables in the dataset when destroying the resource; otherwise, destroying the resource will fail if tables are present.
    description String
    A user-friendly description of the dataset
    externalDatasetReference Property Map
    Information about the external metadata storage where the dataset is defined. Structure is documented below.
    friendlyName String
    A descriptive name for the dataset
    isCaseInsensitive Boolean
    TRUE if the dataset and its table names are case-insensitive, otherwise FALSE. By default, this is FALSE, which means the dataset and its table names are case-sensitive. This field does not affect routine references.
    labels Map<String>

    The labels associated with this dataset. You can use these to organize and group your datasets.

    Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.

    location String

    The geographic location where the dataset should reside. See official docs.

    There are two types of locations, regional or multi-regional. A regional location is a specific geographic place, such as Tokyo, and a multi-regional location is a large geographic area, such as the United States, that contains at least two geographic places.

    The default value is multi-regional location US. Changing this forces a new resource to be created.

    maxTimeTravelHours String
    Defines the time travel window in hours. The value can be from 48 to 168 hours (2 to 7 days).
    project String
    The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
    storageBillingModel String
    Specifies the storage billing model for the dataset. Set this flag value to LOGICAL to use logical bytes for storage billing, or to PHYSICAL to use physical bytes instead. LOGICAL is the default if this flag isn't specified.
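
    A sketch combining several of the remaining inputs described above, in TypeScript; all values are illustrative:

    import * as gcp from "@pulumi/gcp";

    const scratch = new gcp.bigquery.Dataset("scratch", {
        datasetId: "scratch",
        location: "US",
        // Shrink the time travel window from the default to 48 hours (2 days).
        maxTimeTravelHours: "48",
        // Bill storage by physical (compressed) bytes instead of logical bytes.
        storageBillingModel: "PHYSICAL",
        // Treat dataset and table names as case-insensitive.
        isCaseInsensitive: true,
        // Allow destroy to proceed even if tables still exist in the dataset.
        deleteContentsOnDestroy: true,
        labels: {
            env: "dev",
        },
    });
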

    Outputs

    All input properties are implicitly available as output properties. Additionally, the Dataset resource produces the following output properties:

    CreationTime int
    The time when this dataset was created, in milliseconds since the epoch.
    EffectiveLabels Dictionary<string, string>
    All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients, and services.
    Etag string
    A hash of the resource.
    Id string
    The provider-assigned unique ID for this managed resource.
    LastModifiedTime int
    The time when this dataset or any of its tables was last modified, in milliseconds since the epoch.
    PulumiLabels Dictionary<string, string>
    The combination of labels configured directly on the resource and default labels configured on the provider.
    SelfLink string
    The URI of the created resource.
    CreationTime int
    The time when this dataset was created, in milliseconds since the epoch.
    EffectiveLabels map[string]string
    All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients, and services.
    Etag string
    A hash of the resource.
    Id string
    The provider-assigned unique ID for this managed resource.
    LastModifiedTime int
    The time when this dataset or any of its tables was last modified, in milliseconds since the epoch.
    PulumiLabels map[string]string
    The combination of labels configured directly on the resource and default labels configured on the provider.
    SelfLink string
    The URI of the created resource.
    creationTime Integer
    The time when this dataset was created, in milliseconds since the epoch.
    effectiveLabels Map<String,String>
    All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients, and services.
    etag String
    A hash of the resource.
    id String
    The provider-assigned unique ID for this managed resource.
    lastModifiedTime Integer
    The time when this dataset or any of its tables was last modified, in milliseconds since the epoch.
    pulumiLabels Map<String,String>
    The combination of labels configured directly on the resource and default labels configured on the provider.
    selfLink String
    The URI of the created resource.
    creationTime number
    The time when this dataset was created, in milliseconds since the epoch.
    effectiveLabels {[key: string]: string}
    All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients, and services.
    etag string
    A hash of the resource.
    id string
    The provider-assigned unique ID for this managed resource.
    lastModifiedTime number
    The time when this dataset or any of its tables was last modified, in milliseconds since the epoch.
    pulumiLabels {[key: string]: string}
    The combination of labels configured directly on the resource and default labels configured on the provider.
    selfLink string
    The URI of the created resource.
    creation_time int
    The time when this dataset was created, in milliseconds since the epoch.
    effective_labels Mapping[str, str]
    All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients, and services.
    etag str
    A hash of the resource.
    id str
    The provider-assigned unique ID for this managed resource.
    last_modified_time int
    The time when this dataset or any of its tables was last modified, in milliseconds since the epoch.
    pulumi_labels Mapping[str, str]
    The combination of labels configured directly on the resource and default labels configured on the provider.
    self_link str
    The URI of the created resource.
    creationTime Number
    The time when this dataset was created, in milliseconds since the epoch.
    effectiveLabels Map<String>
    All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients, and services.
    etag String
    A hash of the resource.
    id String
    The provider-assigned unique ID for this managed resource.
    lastModifiedTime Number
    The time when this dataset or any of its tables was last modified, in milliseconds since the epoch.
    pulumiLabels Map<String>
    The combination of labels configured directly on the resource and default labels configured on the provider.
    selfLink String
    The URI of the created resource.
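
    The output properties above can be exported directly from a program once the resource is declared. A short, self-contained TypeScript example (the dataset name and ID are placeholders):

    import * as gcp from "@pulumi/gcp";

    const dataset = new gcp.bigquery.Dataset("outputs_demo", {
        datasetId: "outputs_demo",
        location: "US",
    });

    // Output properties resolve once the dataset has been created.
    export const selfLink = dataset.selfLink;             // URI of the created resource
    export const etag = dataset.etag;                     // hash of the resource
    export const creationTimeMs = dataset.creationTime;   // milliseconds since the epoch
    // effectiveLabels also includes labels applied outside of this configuration.
    export const effectiveLabels = dataset.effectiveLabels;
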

    Look up Existing Dataset Resource

    Get an existing Dataset resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

    public static get(name: string, id: Input<ID>, state?: DatasetState, opts?: CustomResourceOptions): Dataset
    @staticmethod
    def get(resource_name: str,
            id: str,
            opts: Optional[ResourceOptions] = None,
            accesses: Optional[Sequence[DatasetAccessArgs]] = None,
            creation_time: Optional[int] = None,
            dataset_id: Optional[str] = None,
            default_collation: Optional[str] = None,
            default_encryption_configuration: Optional[DatasetDefaultEncryptionConfigurationArgs] = None,
            default_partition_expiration_ms: Optional[int] = None,
            default_table_expiration_ms: Optional[int] = None,
            delete_contents_on_destroy: Optional[bool] = None,
            description: Optional[str] = None,
            effective_labels: Optional[Mapping[str, str]] = None,
            etag: Optional[str] = None,
            external_dataset_reference: Optional[DatasetExternalDatasetReferenceArgs] = None,
            friendly_name: Optional[str] = None,
            is_case_insensitive: Optional[bool] = None,
            labels: Optional[Mapping[str, str]] = None,
            last_modified_time: Optional[int] = None,
            location: Optional[str] = None,
            max_time_travel_hours: Optional[str] = None,
            project: Optional[str] = None,
            pulumi_labels: Optional[Mapping[str, str]] = None,
            self_link: Optional[str] = None,
            storage_billing_model: Optional[str] = None) -> Dataset
    func GetDataset(ctx *Context, name string, id IDInput, state *DatasetState, opts ...ResourceOption) (*Dataset, error)
    public static Dataset Get(string name, Input<string> id, DatasetState? state, CustomResourceOptions? opts = null)
    public static Dataset get(String name, Output<String> id, DatasetState state, CustomResourceOptions options)
    Resource lookup is not supported in YAML
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    resource_name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
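
    As a TypeScript sketch, an existing dataset can be looked up by passing its provider ID, which for this resource takes the form projects/{project}/datasets/{dataset_id} (the project and dataset names below are placeholders):

    import * as gcp from "@pulumi/gcp";

    // Look up a dataset that already exists in GCP without managing it.
    const existing = gcp.bigquery.Dataset.get(
        "existing-dataset",
        "projects/my-project/datasets/analytics",
    );

    // The looked-up resource exposes the same output properties as a managed one.
    export const existingLocation = existing.location;
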
    The following state arguments are supported:
    Accesses List<DatasetAccess>
    An array of objects that define dataset access for one or more entities. Structure is documented below.
    CreationTime int
    The time when this dataset was created, in milliseconds since the epoch.
    DatasetId string
    A unique ID for this dataset, without the project name. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters.


    DefaultCollation string
    Defines the default collation specification of future tables created in the dataset. If a table is created in this dataset without table-level default collation, then the table inherits the dataset default collation, which is applied to the string fields that do not have explicit collation specified. A change to this field affects only tables created afterwards, and does not alter the existing tables. The following values are supported:

    • 'und:ci': undetermined locale, case insensitive.
    • '': empty string. Defaults to case-sensitive behavior.
    DefaultEncryptionConfiguration DatasetDefaultEncryptionConfiguration
    The default encryption key for all tables in the dataset. Once this property is set, all newly-created partitioned tables in the dataset will have encryption key set to this value, unless table creation request (or query) overrides the key. Structure is documented below.
    DefaultPartitionExpirationMs int

    The default partition expiration for all partitioned tables in the dataset, in milliseconds.

    Once this property is set, all newly-created partitioned tables in the dataset will have an expirationMs property in the timePartitioning settings set to this value, and changing the value will only affect new tables, not existing ones. The storage in a partition will have an expiration time of its partition time plus this value. Setting this property overrides the use of defaultTableExpirationMs for partitioned tables: only one of defaultTableExpirationMs and defaultPartitionExpirationMs will be used for any new partitioned table. If you provide an explicit timePartitioning.expirationMs when creating or updating a partitioned table, that value takes precedence over the default partition expiration time indicated by this property.

    DefaultTableExpirationMs int

    The default lifetime of all tables in the dataset, in milliseconds. The minimum value is 3600000 milliseconds (one hour).

    Once this property is set, all newly-created tables in the dataset will have an expirationTime property set to the creation time plus the value in this property, and changing the value will only affect new tables, not existing ones. When the expirationTime for a given table is reached, that table will be deleted automatically. If a table's expirationTime is modified or removed before the table expires, or if you provide an explicit expirationTime when creating a table, that value takes precedence over the default expiration time indicated by this property.

    DeleteContentsOnDestroy bool
    If set to true, delete all the tables in the dataset when destroying the resource; otherwise, destroying the resource will fail if tables are present.
    Description string
    A user-friendly description of the dataset
    EffectiveLabels Dictionary<string, string>
    All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients, and services.
    Etag string
    A hash of the resource.
    ExternalDatasetReference DatasetExternalDatasetReference
    Information about the external metadata storage where the dataset is defined. Structure is documented below.
    FriendlyName string
    A descriptive name for the dataset
    IsCaseInsensitive bool
    TRUE if the dataset and its table names are case-insensitive, otherwise FALSE. By default, this is FALSE, which means the dataset and its table names are case-sensitive. This field does not affect routine references.
    Labels Dictionary<string, string>

    The labels associated with this dataset. You can use these to organize and group your datasets.

    Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.

    LastModifiedTime int
    The time when this dataset or any of its tables was last modified, in milliseconds since the epoch.
    Location string

    The geographic location where the dataset should reside. See official docs.

    There are two types of locations, regional or multi-regional. A regional location is a specific geographic place, such as Tokyo, and a multi-regional location is a large geographic area, such as the United States, that contains at least two geographic places.

    The default value is multi-regional location US. Changing this forces a new resource to be created.

    MaxTimeTravelHours string
    Defines the time travel window in hours. The value can be from 48 to 168 hours (2 to 7 days).
    Project string
    The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
    PulumiLabels Dictionary<string, string>
    The combination of labels configured directly on the resource and default labels configured on the provider.
    SelfLink string
    The URI of the created resource.
    StorageBillingModel string
    Specifies the storage billing model for the dataset. Set this flag value to LOGICAL to use logical bytes for storage billing, or to PHYSICAL to use physical bytes instead. LOGICAL is the default if this flag isn't specified.
    Accesses []DatasetAccessTypeArgs
    An array of objects that define dataset access for one or more entities. Structure is documented below.
    CreationTime int
    The time when this dataset was created, in milliseconds since the epoch.
    DatasetId string
    A unique ID for this dataset, without the project name. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters.


    DefaultCollation string
    Defines the default collation specification of future tables created in the dataset. If a table is created in this dataset without table-level default collation, then the table inherits the dataset default collation, which is applied to the string fields that do not have explicit collation specified. A change to this field affects only tables created afterwards, and does not alter the existing tables. The following values are supported:

    • 'und:ci': undetermined locale, case insensitive.
    • '': empty string. Defaults to case-sensitive behavior.
    DefaultEncryptionConfiguration DatasetDefaultEncryptionConfigurationArgs
    The default encryption key for all tables in the dataset. Once this property is set, all newly-created partitioned tables in the dataset will have encryption key set to this value, unless table creation request (or query) overrides the key. Structure is documented below.
    DefaultPartitionExpirationMs int

    The default partition expiration for all partitioned tables in the dataset, in milliseconds.

    Once this property is set, all newly-created partitioned tables in the dataset will have an expirationMs property in the timePartitioning settings set to this value, and changing the value will only affect new tables, not existing ones. The storage in a partition will have an expiration time of its partition time plus this value. Setting this property overrides the use of defaultTableExpirationMs for partitioned tables: only one of defaultTableExpirationMs and defaultPartitionExpirationMs will be used for any new partitioned table. If you provide an explicit timePartitioning.expirationMs when creating or updating a partitioned table, that value takes precedence over the default partition expiration time indicated by this property.

    DefaultTableExpirationMs int

    The default lifetime of all tables in the dataset, in milliseconds. The minimum value is 3600000 milliseconds (one hour).

    Once this property is set, all newly-created tables in the dataset will have an expirationTime property set to the creation time plus the value in this property, and changing the value will only affect new tables, not existing ones. When the expirationTime for a given table is reached, that table will be deleted automatically. If a table's expirationTime is modified or removed before the table expires, or if you provide an explicit expirationTime when creating a table, that value takes precedence over the default expiration time indicated by this property.

    DeleteContentsOnDestroy bool
    If set to true, delete all the tables in the dataset when destroying the resource; otherwise, destroying the resource will fail if tables are present.
    Description string
    A user-friendly description of the dataset
    EffectiveLabels map[string]string
    All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients, and services.
    Etag string
    A hash of the resource.
    ExternalDatasetReference DatasetExternalDatasetReferenceArgs
    Information about the external metadata storage where the dataset is defined. Structure is documented below.
    FriendlyName string
    A descriptive name for the dataset
    IsCaseInsensitive bool
    TRUE if the dataset and its table names are case-insensitive, otherwise FALSE. By default, this is FALSE, which means the dataset and its table names are case-sensitive. This field does not affect routine references.
    Labels map[string]string

    The labels associated with this dataset. You can use these to organize and group your datasets.

    Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.

    LastModifiedTime int
    The time when this dataset or any of its tables was last modified, in milliseconds since the epoch.
    Location string

    The geographic location where the dataset should reside. See official docs.

    There are two types of locations, regional or multi-regional. A regional location is a specific geographic place, such as Tokyo, and a multi-regional location is a large geographic area, such as the United States, that contains at least two geographic places.

    The default value is multi-regional location US. Changing this forces a new resource to be created.

    MaxTimeTravelHours string
    Defines the time travel window in hours. The value can be from 48 to 168 hours (2 to 7 days).
    Project string
    The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
    PulumiLabels map[string]string
    The combination of labels configured directly on the resource and default labels configured on the provider.
    SelfLink string
    The URI of the created resource.
    StorageBillingModel string
    Specifies the storage billing model for the dataset. Set this flag value to LOGICAL to use logical bytes for storage billing, or to PHYSICAL to use physical bytes instead. LOGICAL is the default if this flag isn't specified.
    accesses List<DatasetAccess>
    An array of objects that define dataset access for one or more entities. Structure is documented below.
    creationTime Integer
    The time when this dataset was created, in milliseconds since the epoch.
    datasetId String
    A unique ID for this dataset, without the project name. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters.


    defaultCollation String
    Defines the default collation specification of future tables created in the dataset. If a table is created in this dataset without table-level default collation, then the table inherits the dataset default collation, which is applied to the string fields that do not have explicit collation specified. A change to this field affects only tables created afterwards, and does not alter the existing tables. The following values are supported:

    • 'und:ci': undetermined locale, case insensitive.
    • '': empty string. Defaults to case-sensitive behavior.
    defaultEncryptionConfiguration DatasetDefaultEncryptionConfiguration
    The default encryption key for all tables in the dataset. Once this property is set, all newly-created partitioned tables in the dataset will have encryption key set to this value, unless table creation request (or query) overrides the key. Structure is documented below.
    defaultPartitionExpirationMs Integer

    The default partition expiration for all partitioned tables in the dataset, in milliseconds.

    Once this property is set, all newly-created partitioned tables in the dataset will have an expirationMs property in the timePartitioning settings set to this value, and changing the value will only affect new tables, not existing ones. The storage in a partition will have an expiration time of its partition time plus this value. Setting this property overrides the use of defaultTableExpirationMs for partitioned tables: only one of defaultTableExpirationMs and defaultPartitionExpirationMs will be used for any new partitioned table. If you provide an explicit timePartitioning.expirationMs when creating or updating a partitioned table, that value takes precedence over the default partition expiration time indicated by this property.

    defaultTableExpirationMs Integer

    The default lifetime of all tables in the dataset, in milliseconds. The minimum value is 3600000 milliseconds (one hour).

    Once this property is set, all newly-created tables in the dataset will have an expirationTime property set to the creation time plus the value in this property, and changing the value will only affect new tables, not existing ones. When the expirationTime for a given table is reached, that table will be deleted automatically. If a table's expirationTime is modified or removed before the table expires, or if you provide an explicit expirationTime when creating a table, that value takes precedence over the default expiration time indicated by this property.

    deleteContentsOnDestroy Boolean
    If set to true, delete all the tables in the dataset when destroying the resource; otherwise, destroying the resource will fail if tables are present.
    description String
    A user-friendly description of the dataset
    effectiveLabels Map<String,String>
    All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients, and services.
    etag String
    A hash of the resource.
    externalDatasetReference DatasetExternalDatasetReference
    Information about the external metadata storage where the dataset is defined. Structure is documented below.
    friendlyName String
    A descriptive name for the dataset
    isCaseInsensitive Boolean
    TRUE if the dataset and its table names are case-insensitive, otherwise FALSE. By default, this is FALSE, which means the dataset and its table names are case-sensitive. This field does not affect routine references.
    labels Map<String,String>

    The labels associated with this dataset. You can use these to organize and group your datasets.

    Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.

    lastModifiedTime Integer
    The time when this dataset or any of its tables was last modified, in milliseconds since the epoch.
    location String

    The geographic location where the dataset should reside. See official docs.

    There are two types of locations, regional or multi-regional. A regional location is a specific geographic place, such as Tokyo, and a multi-regional location is a large geographic area, such as the United States, that contains at least two geographic places.

    The default value is multi-regional location US. Changing this forces a new resource to be created.

    maxTimeTravelHours String
    Defines the time travel window in hours. The value can be from 48 to 168 hours (2 to 7 days).
    project String
    The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
    pulumiLabels Map<String,String>
    The combination of labels configured directly on the resource and default labels configured on the provider.
    selfLink String
    The URI of the created resource.
    storageBillingModel String
    Specifies the storage billing model for the dataset. Set this flag value to LOGICAL to use logical bytes for storage billing, or to PHYSICAL to use physical bytes instead. LOGICAL is the default if this flag isn't specified.
    accesses DatasetAccess[]
    An array of objects that define dataset access for one or more entities. Structure is documented below.
    creationTime number
    The time when this dataset was created, in milliseconds since the epoch.
    datasetId string
    A unique ID for this dataset, without the project name. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters.


    defaultCollation string
    Defines the default collation specification of future tables created in the dataset. If a table is created in this dataset without table-level default collation, then the table inherits the dataset default collation, which is applied to the string fields that do not have explicit collation specified. A change to this field affects only tables created afterwards, and does not alter the existing tables. The following values are supported:

    • 'und:ci': undetermined locale, case insensitive.
    • '': empty string. Defaults to case-sensitive behavior.
    defaultEncryptionConfiguration DatasetDefaultEncryptionConfiguration
    The default encryption key for all tables in the dataset. Once this property is set, all newly-created partitioned tables in the dataset will have encryption key set to this value, unless table creation request (or query) overrides the key. Structure is documented below.
    defaultPartitionExpirationMs number

    The default partition expiration for all partitioned tables in the dataset, in milliseconds.

    Once this property is set, all newly-created partitioned tables in the dataset will have an expirationMs property in the timePartitioning settings set to this value, and changing the value will only affect new tables, not existing ones. The storage in a partition will have an expiration time of its partition time plus this value. Setting this property overrides the use of defaultTableExpirationMs for partitioned tables: only one of defaultTableExpirationMs and defaultPartitionExpirationMs will be used for any new partitioned table. If you provide an explicit timePartitioning.expirationMs when creating or updating a partitioned table, that value takes precedence over the default partition expiration time indicated by this property.

    defaultTableExpirationMs number

    The default lifetime of all tables in the dataset, in milliseconds. The minimum value is 3600000 milliseconds (one hour).

    Once this property is set, all newly-created tables in the dataset will have an expirationTime property set to the creation time plus the value in this property, and changing the value will only affect new tables, not existing ones. When the expirationTime for a given table is reached, that table will be deleted automatically. If a table's expirationTime is modified or removed before the table expires, or if you provide an explicit expirationTime when creating a table, that value takes precedence over the default expiration time indicated by this property.

    deleteContentsOnDestroy boolean
    If set to true, delete all the tables in the dataset when destroying the resource; otherwise, destroying the resource will fail if tables are present.
    description string
    A user-friendly description of the dataset.
    effectiveLabels {[key: string]: string}
    All labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients, and services.
    etag string
    A hash of the resource.
    externalDatasetReference DatasetExternalDatasetReference
    Information about the external metadata storage where the dataset is defined. Structure is documented below.
    friendlyName string
    A descriptive name for the dataset.
    isCaseInsensitive boolean
    TRUE if the dataset and its table names are case-insensitive, otherwise FALSE. By default, this is FALSE, which means the dataset and its table names are case-sensitive. This field does not affect routine references.
    labels {[key: string]: string}

    The labels associated with this dataset. You can use these to organize and group your datasets.

    Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.

    lastModifiedTime number
    The date when this dataset or any of its tables was last modified, in milliseconds since the epoch.
    location string

    The geographic location where the dataset should reside. See official docs.

    There are two types of locations, regional or multi-regional. A regional location is a specific geographic place, such as Tokyo, and a multi-regional location is a large geographic area, such as the United States, that contains at least two geographic places.

    The default value is multi-regional location US. Changing this forces a new resource to be created.

    maxTimeTravelHours string
    Defines the time travel window in hours. The value can be from 48 to 168 hours (2 to 7 days).
    project string
    The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
    pulumiLabels {[key: string]: string}
    The combination of labels configured directly on the resource and default labels configured on the provider.
    selfLink string
    The URI of the created resource.
    storageBillingModel string
    Specifies the storage billing model for the dataset. Set this flag value to LOGICAL to use logical bytes for storage billing, or to PHYSICAL to use physical bytes instead. LOGICAL is the default if this flag isn't specified.
    accesses Sequence[DatasetAccessArgs]
    An array of objects that define dataset access for one or more entities. Structure is documented below.
    creation_time int
    The time when this dataset was created, in milliseconds since the epoch.
    dataset_id str
    A unique ID for this dataset, without the project name. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters.


    default_collation str
    Defines the default collation specification of future tables created in the dataset. If a table is created in this dataset without table-level default collation, then the table inherits the dataset default collation, which is applied to the string fields that do not have explicit collation specified. A change to this field affects only tables created afterwards, and does not alter the existing tables. The following values are supported:

    • 'und:ci': undetermined locale, case insensitive.
    • '': empty string. Defaults to case-sensitive behavior.
    default_encryption_configuration DatasetDefaultEncryptionConfigurationArgs
    The default encryption key for all tables in the dataset. Once this property is set, all newly-created partitioned tables in the dataset will have encryption key set to this value, unless table creation request (or query) overrides the key. Structure is documented below.
    default_partition_expiration_ms int

    The default partition expiration for all partitioned tables in the dataset, in milliseconds.

    Once this property is set, all newly-created partitioned tables in the dataset will have an expirationMs property in the timePartitioning settings set to this value, and changing the value will only affect new tables, not existing ones. The storage in a partition will have an expiration time of its partition time plus this value. Setting this property overrides the use of defaultTableExpirationMs for partitioned tables: only one of defaultTableExpirationMs and defaultPartitionExpirationMs will be used for any new partitioned table. If you provide an explicit timePartitioning.expirationMs when creating or updating a partitioned table, that value takes precedence over the default partition expiration time indicated by this property.

    default_table_expiration_ms int

    The default lifetime of all tables in the dataset, in milliseconds. The minimum value is 3600000 milliseconds (one hour).

    Once this property is set, all newly-created tables in the dataset will have an expirationTime property set to the creation time plus the value in this property, and changing the value will only affect new tables, not existing ones. When the expirationTime for a given table is reached, that table will be deleted automatically. If a table's expirationTime is modified or removed before the table expires, or if you provide an explicit expirationTime when creating a table, that value takes precedence over the default expiration time indicated by this property.

    delete_contents_on_destroy bool
    If set to true, delete all the tables in the dataset when destroying the resource; otherwise, destroying the resource will fail if tables are present.
    description str
    A user-friendly description of the dataset.
    effective_labels Mapping[str, str]
    All labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients, and services.
    etag str
    A hash of the resource.
    external_dataset_reference DatasetExternalDatasetReferenceArgs
    Information about the external metadata storage where the dataset is defined. Structure is documented below.
    friendly_name str
    A descriptive name for the dataset.
    is_case_insensitive bool
    TRUE if the dataset and its table names are case-insensitive, otherwise FALSE. By default, this is FALSE, which means the dataset and its table names are case-sensitive. This field does not affect routine references.
    labels Mapping[str, str]

    The labels associated with this dataset. You can use these to organize and group your datasets.

    Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.

    last_modified_time int
    The date when this dataset or any of its tables was last modified, in milliseconds since the epoch.
    location str

    The geographic location where the dataset should reside. See official docs.

    There are two types of locations, regional or multi-regional. A regional location is a specific geographic place, such as Tokyo, and a multi-regional location is a large geographic area, such as the United States, that contains at least two geographic places.

    The default value is multi-regional location US. Changing this forces a new resource to be created.

    max_time_travel_hours str
    Defines the time travel window in hours. The value can be from 48 to 168 hours (2 to 7 days).
    project str
    The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
    pulumi_labels Mapping[str, str]
    The combination of labels configured directly on the resource and default labels configured on the provider.
    self_link str
    The URI of the created resource.
    storage_billing_model str
    Specifies the storage billing model for the dataset. Set this flag value to LOGICAL to use logical bytes for storage billing, or to PHYSICAL to use physical bytes instead. LOGICAL is the default if this flag isn't specified.
    accesses List<Property Map>
    An array of objects that define dataset access for one or more entities. Structure is documented below.
    creationTime Number
    The time when this dataset was created, in milliseconds since the epoch.
    datasetId String
    A unique ID for this dataset, without the project name. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters.


    defaultCollation String
    Defines the default collation specification of future tables created in the dataset. If a table is created in this dataset without table-level default collation, then the table inherits the dataset default collation, which is applied to the string fields that do not have explicit collation specified. A change to this field affects only tables created afterwards, and does not alter the existing tables. The following values are supported:

    • 'und:ci': undetermined locale, case insensitive.
    • '': empty string. Defaults to case-sensitive behavior.
    defaultEncryptionConfiguration Property Map
    The default encryption key for all tables in the dataset. Once this property is set, all newly-created partitioned tables in the dataset will have encryption key set to this value, unless table creation request (or query) overrides the key. Structure is documented below.
    defaultPartitionExpirationMs Number

    The default partition expiration for all partitioned tables in the dataset, in milliseconds.

    Once this property is set, all newly-created partitioned tables in the dataset will have an expirationMs property in the timePartitioning settings set to this value, and changing the value will only affect new tables, not existing ones. The storage in a partition will have an expiration time of its partition time plus this value. Setting this property overrides the use of defaultTableExpirationMs for partitioned tables: only one of defaultTableExpirationMs and defaultPartitionExpirationMs will be used for any new partitioned table. If you provide an explicit timePartitioning.expirationMs when creating or updating a partitioned table, that value takes precedence over the default partition expiration time indicated by this property.

    defaultTableExpirationMs Number

    The default lifetime of all tables in the dataset, in milliseconds. The minimum value is 3600000 milliseconds (one hour).

    Once this property is set, all newly-created tables in the dataset will have an expirationTime property set to the creation time plus the value in this property, and changing the value will only affect new tables, not existing ones. When the expirationTime for a given table is reached, that table will be deleted automatically. If a table's expirationTime is modified or removed before the table expires, or if you provide an explicit expirationTime when creating a table, that value takes precedence over the default expiration time indicated by this property.

    deleteContentsOnDestroy Boolean
    If set to true, delete all the tables in the dataset when destroying the resource; otherwise, destroying the resource will fail if tables are present.
    description String
    A user-friendly description of the dataset.
    effectiveLabels Map<String>
    All labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients, and services.
    etag String
    A hash of the resource.
    externalDatasetReference Property Map
    Information about the external metadata storage where the dataset is defined. Structure is documented below.
    friendlyName String
    A descriptive name for the dataset.
    isCaseInsensitive Boolean
    TRUE if the dataset and its table names are case-insensitive, otherwise FALSE. By default, this is FALSE, which means the dataset and its table names are case-sensitive. This field does not affect routine references.
    labels Map<String>

    The labels associated with this dataset. You can use these to organize and group your datasets.

    Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.

    lastModifiedTime Number
    The date when this dataset or any of its tables was last modified, in milliseconds since the epoch.
    location String

    The geographic location where the dataset should reside. See official docs.

    There are two types of locations, regional or multi-regional. A regional location is a specific geographic place, such as Tokyo, and a multi-regional location is a large geographic area, such as the United States, that contains at least two geographic places.

    The default value is multi-regional location US. Changing this forces a new resource to be created.

    maxTimeTravelHours String
    Defines the time travel window in hours. The value can be from 48 to 168 hours (2 to 7 days).
    project String
    The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
    pulumiLabels Map<String>
    The combination of labels configured directly on the resource and default labels configured on the provider.
    selfLink String
    The URI of the created resource.
    storageBillingModel String
    Specifies the storage billing model for the dataset. Set this flag value to LOGICAL to use logical bytes for storage billing, or to PHYSICAL to use physical bytes instead. LOGICAL is the default if this flag isn't specified.
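
    Several of the defaults above interact (for example, defaultPartitionExpirationMs overrides defaultTableExpirationMs for partitioned tables). The TypeScript sketch below combines a few of them in one dataset; every ID and value is an illustrative placeholder, not a recommendation.

    import * as gcp from "@pulumi/gcp";
    
    // Hypothetical dataset combining several of the optional defaults documented above.
    const reporting = new gcp.bigquery.Dataset("reporting", {
        datasetId: "reporting_dataset",
        location: "EU",
        defaultCollation: "und:ci",              // new string columns default to case-insensitive collation
        defaultTableExpirationMs: 3600000,       // tables expire one hour after creation
        defaultPartitionExpirationMs: 86400000,  // partitions expire one day after their partition time
        maxTimeTravelHours: "96",                // four-day time travel window (48 to 168 allowed)
        storageBillingModel: "PHYSICAL",         // bill on physical rather than logical bytes
        deleteContentsOnDestroy: false,          // destroying the resource fails while tables exist
    });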

    Supporting Types

    DatasetAccess, DatasetAccessArgs

    Dataset DatasetAccessDataset
    Grants all resources of particular types in a particular dataset read access to the current dataset. Structure is documented below.
    Domain string
    A domain to grant access to. Any users signed in with the domain specified will be granted the specified access.
    GroupByEmail string
    An email address of a Google Group to grant access to.
    IamMember string
    Some other type of member that appears in the IAM Policy but isn't a user, group, domain, or special group. For example: allUsers
    Role string
    Describes the rights granted to the user specified by the other member of the access object. Basic, predefined, and custom roles are supported. Predefined roles that have equivalent basic roles are swapped by the API to their basic counterparts. See official docs.
    Routine DatasetAccessRoutine
    A routine from a different dataset to grant access to. Queries executed against that routine will have read access to tables in this dataset. The role field is not required when this field is set. If that routine is updated by any user, access to the routine needs to be granted again via an update operation. Structure is documented below.
    SpecialGroup string
    A special group to grant access to. Possible values include:
    UserByEmail string
    An email address of a user to grant access to. For example: fred@example.com
    View DatasetAccessView
    A view from a different dataset to grant access to. Queries executed against that view will have read access to tables in this dataset. The role field is not required when this field is set. If that view is updated by any user, access to the view needs to be granted again via an update operation. Structure is documented below.
    Dataset DatasetAccessDataset
    Grants all resources of particular types in a particular dataset read access to the current dataset. Structure is documented below.
    Domain string
    A domain to grant access to. Any users signed in with the domain specified will be granted the specified access.
    GroupByEmail string
    An email address of a Google Group to grant access to.
    IamMember string
    Some other type of member that appears in the IAM Policy but isn't a user, group, domain, or special group. For example: allUsers
    Role string
    Describes the rights granted to the user specified by the other member of the access object. Basic, predefined, and custom roles are supported. Predefined roles that have equivalent basic roles are swapped by the API to their basic counterparts. See official docs.
    Routine DatasetAccessRoutine
    A routine from a different dataset to grant access to. Queries executed against that routine will have read access to tables in this dataset. The role field is not required when this field is set. If that routine is updated by any user, access to the routine needs to be granted again via an update operation. Structure is documented below.
    SpecialGroup string
    A special group to grant access to. Possible values include:
    UserByEmail string
    An email address of a user to grant access to. For example: fred@example.com
    View DatasetAccessView
    A view from a different dataset to grant access to. Queries executed against that view will have read access to tables in this dataset. The role field is not required when this field is set. If that view is updated by any user, access to the view needs to be granted again via an update operation. Structure is documented below.
    dataset DatasetAccessDataset
    Grants all resources of particular types in a particular dataset read access to the current dataset. Structure is documented below.
    domain String
    A domain to grant access to. Any users signed in with the domain specified will be granted the specified access.
    groupByEmail String
    An email address of a Google Group to grant access to.
    iamMember String
    Some other type of member that appears in the IAM Policy but isn't a user, group, domain, or special group. For example: allUsers
    role String
    Describes the rights granted to the user specified by the other member of the access object. Basic, predefined, and custom roles are supported. Predefined roles that have equivalent basic roles are swapped by the API to their basic counterparts. See official docs.
    routine DatasetAccessRoutine
    A routine from a different dataset to grant access to. Queries executed against that routine will have read access to tables in this dataset. The role field is not required when this field is set. If that routine is updated by any user, access to the routine needs to be granted again via an update operation. Structure is documented below.
    specialGroup String
    A special group to grant access to. Possible values include:
    userByEmail String
    An email address of a user to grant access to. For example: fred@example.com
    view DatasetAccessView
    A view from a different dataset to grant access to. Queries executed against that view will have read access to tables in this dataset. The role field is not required when this field is set. If that view is updated by any user, access to the view needs to be granted again via an update operation. Structure is documented below.
    dataset DatasetAccessDataset
    Grants all resources of particular types in a particular dataset read access to the current dataset. Structure is documented below.
    domain string
    A domain to grant access to. Any users signed in with the domain specified will be granted the specified access.
    groupByEmail string
    An email address of a Google Group to grant access to.
    iamMember string
    Some other type of member that appears in the IAM Policy but isn't a user, group, domain, or special group. For example: allUsers
    role string
    Describes the rights granted to the user specified by the other member of the access object. Basic, predefined, and custom roles are supported. Predefined roles that have equivalent basic roles are swapped by the API to their basic counterparts. See official docs.
    routine DatasetAccessRoutine
    A routine from a different dataset to grant access to. Queries executed against that routine will have read access to tables in this dataset. The role field is not required when this field is set. If that routine is updated by any user, access to the routine needs to be granted again via an update operation. Structure is documented below.
    specialGroup string
    A special group to grant access to. Possible values include:
    userByEmail string
    An email address of a user to grant access to. For example: fred@example.com
    view DatasetAccessView
    A view from a different dataset to grant access to. Queries executed against that view will have read access to tables in this dataset. The role field is not required when this field is set. If that view is updated by any user, access to the view needs to be granted again via an update operation. Structure is documented below.
    dataset DatasetAccessDataset
    Grants all resources of particular types in a particular dataset read access to the current dataset. Structure is documented below.
    domain str
    A domain to grant access to. Any users signed in with the domain specified will be granted the specified access.
    group_by_email str
    An email address of a Google Group to grant access to.
    iam_member str
    Some other type of member that appears in the IAM Policy but isn't a user, group, domain, or special group. For example: allUsers
    role str
    Describes the rights granted to the user specified by the other member of the access object. Basic, predefined, and custom roles are supported. Predefined roles that have equivalent basic roles are swapped by the API to their basic counterparts. See official docs.
    routine DatasetAccessRoutine
    A routine from a different dataset to grant access to. Queries executed against that routine will have read access to tables in this dataset. The role field is not required when this field is set. If that routine is updated by any user, access to the routine needs to be granted again via an update operation. Structure is documented below.
    special_group str
    A special group to grant access to. Possible values include:
    user_by_email str
    An email address of a user to grant access to. For example: fred@example.com
    view DatasetAccessView
    A view from a different dataset to grant access to. Queries executed against that view will have read access to tables in this dataset. The role field is not required when this field is set. If that view is updated by any user, access to the view needs to be granted again via an update operation. Structure is documented below.
    dataset Property Map
    Grants all resources of particular types in a particular dataset read access to the current dataset. Structure is documented below.
    domain String
    A domain to grant access to. Any users signed in with the domain specified will be granted the specified access.
    groupByEmail String
    An email address of a Google Group to grant access to.
    iamMember String
    Some other type of member that appears in the IAM Policy but isn't a user, group, domain, or special group. For example: allUsers
    role String
    Describes the rights granted to the user specified by the other member of the access object. Basic, predefined, and custom roles are supported. Predefined roles that have equivalent basic roles are swapped by the API to their basic counterparts. See official docs.
    routine Property Map
    A routine from a different dataset to grant access to. Queries executed against that routine will have read access to tables in this dataset. The role field is not required when this field is set. If that routine is updated by any user, access to the routine needs to be granted again via an update operation. Structure is documented below.
    specialGroup String
    A special group to grant access to. Possible values include:
    userByEmail String
    An email address of a user to grant access to. For example: fred@example.com
    view Property Map
    A view from a different dataset to grant access to. Queries executed against that view will have read access to tables in this dataset. The role field is not required when this field is set. If that view is updated by any user, access to the view needs to be granted again via an update operation. Structure is documented below.
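
    Grants that name a user, group, domain, or IAM member are expressed as entries in the dataset's accesses list together with a role. The TypeScript sketch below is illustrative only; the email addresses and member are hypothetical placeholders.

    import * as gcp from "@pulumi/gcp";
    
    const analytics = new gcp.bigquery.Dataset("analytics", {
        datasetId: "analytics_dataset",
        location: "EU",
        accesses: [
            // Hypothetical owner; typically a user or service account you control.
            { role: "OWNER", userByEmail: "dataset-owner@example.com" },
            // Hypothetical Google Group of readers.
            { role: "READER", groupByEmail: "analysts@example.com" },
            // An IAM member that is not a user, group, or domain (for example, allUsers).
            { role: "READER", iamMember: "allUsers" },
        ],
    });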

    DatasetAccessDataset, DatasetAccessDatasetArgs

    Dataset DatasetAccessDatasetDataset
    The dataset this entry applies to. Structure is documented below.
    TargetTypes List<string>
    Which resources in the dataset this entry applies to. Currently, only views are supported, but additional target types may be added in the future. Possible values: VIEWS
    Dataset DatasetAccessDatasetDataset
    The dataset this entry applies to. Structure is documented below.
    TargetTypes []string
    Which resources in the dataset this entry applies to. Currently, only views are supported, but additional target types may be added in the future. Possible values: VIEWS
    dataset DatasetAccessDatasetDataset
    The dataset this entry applies to. Structure is documented below.
    targetTypes List<String>
    Which resources in the dataset this entry applies to. Currently, only views are supported, but additional target types may be added in the future. Possible values: VIEWS
    dataset DatasetAccessDatasetDataset
    The dataset this entry applies to. Structure is documented below.
    targetTypes string[]
    Which resources in the dataset this entry applies to. Currently, only views are supported, but additional target types may be added in the future. Possible values: VIEWS
    dataset DatasetAccessDatasetDataset
    The dataset this entry applies to. Structure is documented below.
    target_types Sequence[str]
    Which resources in the dataset this entry applies to. Currently, only views are supported, but additional target types may be added in the future. Possible values: VIEWS
    dataset Property Map
    The dataset this entry applies to. Structure is documented below.
    targetTypes List<String>
    Which resources in the dataset this entry applies to. Currently, only views are supported, but additional target types may be added in the future. Possible values: VIEWS
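
    An authorized-dataset grant nests one dataset reference inside another: the inner dataset block names the dataset whose resources (currently only views) receive read access to the dataset being defined. A minimal TypeScript sketch, with a hypothetical project and consumer dataset ID:

    import * as gcp from "@pulumi/gcp";
    
    // Hypothetical: every view in "consumer_dataset" may read tables in this dataset.
    const shared = new gcp.bigquery.Dataset("shared", {
        datasetId: "shared_dataset",
        location: "EU",
        accesses: [{
            dataset: {
                dataset: {
                    projectId: "my-project",        // hypothetical project ID
                    datasetId: "consumer_dataset",  // hypothetical dataset ID
                },
                targetTypes: ["VIEWS"],
            },
        }],
    });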

    DatasetAccessDatasetDataset, DatasetAccessDatasetDatasetArgs

    DatasetId string
    The ID of the dataset containing this table.
    ProjectId string
    The ID of the project containing this table.
    DatasetId string
    The ID of the dataset containing this table.
    ProjectId string
    The ID of the project containing this table.
    datasetId String
    The ID of the dataset containing this table.
    projectId String
    The ID of the project containing this table.
    datasetId string
    The ID of the dataset containing this table.
    projectId string
    The ID of the project containing this table.
    dataset_id str
    The ID of the dataset containing this table.
    project_id str
    The ID of the project containing this table.
    datasetId String
    The ID of the dataset containing this table.
    projectId String
    The ID of the project containing this table.

    DatasetAccessRoutine, DatasetAccessRoutineArgs

    DatasetId string
    The ID of the dataset containing this table.
    ProjectId string
    The ID of the project containing this table.
    RoutineId string
    The ID of the routine. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 256 characters.
    DatasetId string
    The ID of the dataset containing this table.
    ProjectId string
    The ID of the project containing this table.
    RoutineId string
    The ID of the routine. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 256 characters.
    datasetId String
    The ID of the dataset containing this table.
    projectId String
    The ID of the project containing this table.
    routineId String
    The ID of the routine. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 256 characters.
    datasetId string
    The ID of the dataset containing this table.
    projectId string
    The ID of the project containing this table.
    routineId string
    The ID of the routine. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 256 characters.
    dataset_id str
    The ID of the dataset containing this table.
    project_id str
    The ID of the project containing this table.
    routine_id str
    The ID of the routine. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 256 characters.
    datasetId String
    The ID of the dataset containing this table.
    projectId String
    The ID of the project containing this table.
    routineId String
    The ID of the routine. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 256 characters.
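
    A routine-based grant authorizes a routine from another dataset to read tables in this one; no role is set on the entry. A sketch with hypothetical identifiers (any other access entries the dataset needs are omitted for brevity):

    import * as gcp from "@pulumi/gcp";
    
    const metrics = new gcp.bigquery.Dataset("metrics", {
        datasetId: "metrics_dataset",
        location: "EU",
        accesses: [{
            routine: {
                projectId: "my-project",         // hypothetical
                datasetId: "tooling_dataset",    // hypothetical
                routineId: "aggregate_metrics",  // hypothetical
            },
        }],
    });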

    DatasetAccessView, DatasetAccessViewArgs

    DatasetId string
    The ID of the dataset containing this table.
    ProjectId string
    The ID of the project containing this table.
    TableId string
    The ID of the table. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters.
    DatasetId string
    The ID of the dataset containing this table.
    ProjectId string
    The ID of the project containing this table.
    TableId string
    The ID of the table. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters.
    datasetId String
    The ID of the dataset containing this table.
    projectId String
    The ID of the project containing this table.
    tableId String
    The ID of the table. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters.
    datasetId string
    The ID of the dataset containing this table.
    projectId string
    The ID of the project containing this table.
    tableId string
    The ID of the table. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters.
    dataset_id str
    The ID of the dataset containing this table.
    project_id str
    The ID of the project containing this table.
    table_id str
    The ID of the table. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters.
    datasetId String
    The ID of the dataset containing this table.
    projectId String
    The ID of the project containing this table.
    tableId String
    The ID of the table. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters.
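
    A view-based grant works the same way: the entry names the authorized view and no role is required. Hypothetical identifiers again:

    import * as gcp from "@pulumi/gcp";
    
    const raw = new gcp.bigquery.Dataset("raw", {
        datasetId: "raw_dataset",
        location: "EU",
        accesses: [{
            view: {
                projectId: "my-project",         // hypothetical
                datasetId: "reporting_dataset",  // hypothetical
                tableId: "daily_summary",        // hypothetical
            },
        }],
    });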

    DatasetDefaultEncryptionConfiguration, DatasetDefaultEncryptionConfigurationArgs

    KmsKeyName string
    Describes the Cloud KMS encryption key that will be used to protect the destination BigQuery table. The BigQuery Service Account associated with your project requires access to this encryption key.
    KmsKeyName string
    Describes the Cloud KMS encryption key that will be used to protect the destination BigQuery table. The BigQuery Service Account associated with your project requires access to this encryption key.
    kmsKeyName String
    Describes the Cloud KMS encryption key that will be used to protect the destination BigQuery table. The BigQuery Service Account associated with your project requires access to this encryption key.
    kmsKeyName string
    Describes the Cloud KMS encryption key that will be used to protect the destination BigQuery table. The BigQuery Service Account associated with your project requires access to this encryption key.
    kms_key_name str
    Describes the Cloud KMS encryption key that will be used to protect the destination BigQuery table. The BigQuery Service Account associated with your project requires access to this encryption key.
    kmsKeyName String
    Describes the Cloud KMS encryption key that will be used to protect the destination BigQuery table. The BigQuery Service Account associated with your project requires access to this encryption key.
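
    Setting a default encryption configuration makes newly created tables in the dataset use the given Cloud KMS key. A minimal sketch, assuming the key already exists and the BigQuery service account has been granted access to it; the key path is a placeholder:

    import * as gcp from "@pulumi/gcp";
    
    const secure = new gcp.bigquery.Dataset("secure", {
        datasetId: "secure_dataset",
        location: "EU",
        defaultEncryptionConfiguration: {
            // Hypothetical key path; replace with an existing CMEK the service account can use.
            kmsKeyName: "projects/my-project/locations/europe-west1/keyRings/bq/cryptoKeys/dataset-key",
        },
    });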

    DatasetExternalDatasetReference, DatasetExternalDatasetReferenceArgs

    Connection string
    The connection id that is used to access the externalSource. Format: projects/{projectId}/locations/{locationId}/connections/{connectionId}
    ExternalSource string
    External source that backs this dataset.
    Connection string
    The connection id that is used to access the externalSource. Format: projects/{projectId}/locations/{locationId}/connections/{connectionId}
    ExternalSource string
    External source that backs this dataset.
    connection String
    The connection id that is used to access the externalSource. Format: projects/{projectId}/locations/{locationId}/connections/{connectionId}
    externalSource String
    External source that backs this dataset.
    connection string
    The connection id that is used to access the externalSource. Format: projects/{projectId}/locations/{locationId}/connections/{connectionId}
    externalSource string
    External source that backs this dataset.
    connection str
    The connection id that is used to access the externalSource. Format: projects/{projectId}/locations/{locationId}/connections/{connectionId}
    external_source str
    External source that backs this dataset.
    connection String
    The connection id that is used to access the externalSource. Format: projects/{projectId}/locations/{locationId}/connections/{connectionId}
    externalSource String
    External source that backs this dataset.
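
    An external dataset reference points the dataset at metadata stored outside BigQuery through a connection. A sketch with placeholder identifiers; the connection follows the documented format, and the external source value here is purely a placeholder:

    import * as gcp from "@pulumi/gcp";
    
    const federated = new gcp.bigquery.Dataset("federated", {
        datasetId: "federated_dataset",
        location: "US",
        externalDatasetReference: {
            connection: "projects/my-project/locations/us/connections/my-connection",  // hypothetical
            externalSource: "external-source-identifier",  // placeholder; see the BigQuery docs for accepted formats
        },
    });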

    Import

    Dataset can be imported using any of these accepted formats:

    • projects/{{project}}/datasets/{{dataset_id}}

    • {{project}}/{{dataset_id}}

    • {{dataset_id}}

    When using the pulumi import command, Dataset can be imported using one of the formats above. For example:

    $ pulumi import gcp:bigquery/dataset:Dataset default projects/{{project}}/datasets/{{dataset_id}}
    
    $ pulumi import gcp:bigquery/dataset:Dataset default {{project}}/{{dataset_id}}
    
    $ pulumi import gcp:bigquery/dataset:Dataset default {{dataset_id}}
    

    To learn more about importing existing cloud resources, see Importing resources.

    Package Details

    Repository
    Google Cloud (GCP) Classic pulumi/pulumi-gcp
    License
    Apache-2.0
    Notes
    This Pulumi package is based on the google-beta Terraform Provider.