gcp logo
Google Cloud Classic v6.52.0, released Mar 22, 2023

gcp.bigquery.Dataset

Import

A Dataset can be imported using any of these accepted formats:

 $ pulumi import gcp:bigquery/dataset:Dataset default projects/{{project}}/datasets/{{dataset_id}}
 $ pulumi import gcp:bigquery/dataset:Dataset default {{project}}/{{dataset_id}}
 $ pulumi import gcp:bigquery/dataset:Dataset default {{dataset_id}}

Example Usage

Bigquery Dataset Basic

// Example: create a BigQuery dataset owned by a dedicated service account,
// with read-only access granted to an entire domain.
using System.Collections.Generic;
using Pulumi;
using Gcp = Pulumi.Gcp;

return await Deployment.RunAsync(() => 
{
    // Service account that will be granted the OWNER role on the dataset.
    var bqowner = new Gcp.ServiceAccount.Account("bqowner", new()
    {
        AccountId = "bqowner",
    });

    var dataset = new Gcp.BigQuery.Dataset("dataset", new()
    {
        DatasetId = "example_dataset",
        FriendlyName = "test",
        Description = "This is a test description",
        Location = "EU",
        // New tables default to expiring one hour (3,600,000 ms) after creation.
        DefaultTableExpirationMs = 3600000,
        Labels = 
        {
            { "env", "default" },
        },
        Accesses = new[]
        {
            // Full control for the service account created above.
            new Gcp.BigQuery.Inputs.DatasetAccessArgs
            {
                Role = "OWNER",
                UserByEmail = bqowner.Email,
            },
            // Read-only access for every user in the domain.
            new Gcp.BigQuery.Inputs.DatasetAccessArgs
            {
                Role = "READER",
                Domain = "hashicorp.com",
            },
        },
    });

});
// Example: create a BigQuery dataset owned by a dedicated service account,
// with read-only access granted to an entire domain.
package main

import (
	"github.com/pulumi/pulumi-gcp/sdk/v6/go/gcp/bigquery"
	"github.com/pulumi/pulumi-gcp/sdk/v6/go/gcp/serviceAccount"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		// Service account that will be granted the OWNER role on the dataset.
		bqowner, err := serviceAccount.NewAccount(ctx, "bqowner", &serviceAccount.AccountArgs{
			AccountId: pulumi.String("bqowner"),
		})
		if err != nil {
			return err
		}
		_, err = bigquery.NewDataset(ctx, "dataset", &bigquery.DatasetArgs{
			DatasetId:    pulumi.String("example_dataset"),
			FriendlyName: pulumi.String("test"),
			Description:  pulumi.String("This is a test description"),
			Location:     pulumi.String("EU"),
			// New tables default to expiring one hour (3,600,000 ms) after creation.
			DefaultTableExpirationMs: pulumi.Int(3600000),
			Labels: pulumi.StringMap{
				"env": pulumi.String("default"),
			},
			Accesses: bigquery.DatasetAccessTypeArray{
				// Full control for the service account created above.
				&bigquery.DatasetAccessTypeArgs{
					Role:        pulumi.String("OWNER"),
					UserByEmail: bqowner.Email,
				},
				// Read-only access for every user in the domain.
				&bigquery.DatasetAccessTypeArgs{
					Role:   pulumi.String("READER"),
					Domain: pulumi.String("hashicorp.com"),
				},
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}
// Example: create a BigQuery dataset owned by a dedicated service account,
// with read-only access granted to an entire domain.
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.serviceAccount.Account;
import com.pulumi.gcp.serviceAccount.AccountArgs;
import com.pulumi.gcp.bigquery.Dataset;
import com.pulumi.gcp.bigquery.DatasetArgs;
import com.pulumi.gcp.bigquery.inputs.DatasetAccessArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        // Service account that will be granted the OWNER role on the dataset.
        var bqowner = new Account("bqowner", AccountArgs.builder()        
            .accountId("bqowner")
            .build());

        var dataset = new Dataset("dataset", DatasetArgs.builder()        
            .datasetId("example_dataset")
            .friendlyName("test")
            .description("This is a test description")
            .location("EU")
            // New tables default to expiring one hour (3,600,000 ms) after creation.
            .defaultTableExpirationMs(3600000)
            .labels(Map.of("env", "default"))
            .accesses(            
                // Full control for the service account created above.
                DatasetAccessArgs.builder()
                    .role("OWNER")
                    .userByEmail(bqowner.email())
                    .build(),
                // Read-only access for every user in the domain.
                DatasetAccessArgs.builder()
                    .role("READER")
                    .domain("hashicorp.com")
                    .build())
            .build());

    }
}
import pulumi
import pulumi_gcp as gcp

# Service account that will be granted the OWNER role on the dataset.
owner = gcp.service_account.Account(
    "bqowner",
    account_id="bqowner",
)

# Full control for the owner; read-only access for every user in the domain.
dataset_accesses = [
    gcp.bigquery.DatasetAccessArgs(role="OWNER", user_by_email=owner.email),
    gcp.bigquery.DatasetAccessArgs(role="READER", domain="hashicorp.com"),
]

dataset = gcp.bigquery.Dataset(
    "dataset",
    dataset_id="example_dataset",
    friendly_name="test",
    description="This is a test description",
    location="EU",
    default_table_expiration_ms=3600000,  # one hour
    labels={"env": "default"},
    accesses=dataset_accesses,
)
import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";

// Service account that will be granted the OWNER role on the dataset.
const owner = new gcp.serviceaccount.Account("bqowner", {
    accountId: "bqowner",
});

// Full control for the owner; read-only access for every user in the domain.
const datasetAccesses = [
    { role: "OWNER", userByEmail: owner.email },
    { role: "READER", domain: "hashicorp.com" },
];

const dataset = new gcp.bigquery.Dataset("dataset", {
    datasetId: "example_dataset",
    friendlyName: "test",
    description: "This is a test description",
    location: "EU",
    defaultTableExpirationMs: 3600000, // one hour
    labels: { env: "default" },
    accesses: datasetAccesses,
});
resources:
  dataset:
    type: gcp:bigquery:Dataset
    properties:
      datasetId: example_dataset
      friendlyName: test
      description: This is a test description
      location: EU
      # One hour, written as a plain integer: the field is integral, so avoid
      # the float literal (3.6e+06) the generator emitted.
      defaultTableExpirationMs: 3600000
      labels:
        env: default
      accesses:
        # Full control for the service account declared below.
        - role: OWNER
          userByEmail: ${bqowner.email}
        # Read-only access for every user in the domain.
        - role: READER
          domain: hashicorp.com
  bqowner:
    type: gcp:serviceAccount:Account
    properties:
      accountId: bqowner

Bigquery Dataset Cmek

// Example: encrypt a BigQuery dataset with a customer-managed KMS key (CMEK).
using System.Collections.Generic;
using Pulumi;
using Gcp = Pulumi.Gcp;

return await Deployment.RunAsync(() => 
{
    // Key ring and crypto key that will encrypt the dataset's tables.
    var keyRing = new Gcp.Kms.KeyRing("keyRing", new()
    {
        Location = "us",
    });

    var cryptoKey = new Gcp.Kms.CryptoKey("cryptoKey", new()
    {
        KeyRing = keyRing.Id,
    });

    var dataset = new Gcp.BigQuery.Dataset("dataset", new()
    {
        DatasetId = "example_dataset",
        FriendlyName = "test",
        Description = "This is a test description",
        Location = "US",
        // New tables default to expiring one hour (3,600,000 ms) after creation.
        DefaultTableExpirationMs = 3600000,
        // New tables in this dataset are encrypted with the key above.
        DefaultEncryptionConfiguration = new Gcp.BigQuery.Inputs.DatasetDefaultEncryptionConfigurationArgs
        {
            KmsKeyName = cryptoKey.Id,
        },
    });

});
// Example: encrypt a BigQuery dataset with a customer-managed KMS key (CMEK).
package main

import (
	"github.com/pulumi/pulumi-gcp/sdk/v6/go/gcp/bigquery"
	"github.com/pulumi/pulumi-gcp/sdk/v6/go/gcp/kms"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		// Key ring and crypto key that will encrypt the dataset's tables.
		keyRing, err := kms.NewKeyRing(ctx, "keyRing", &kms.KeyRingArgs{
			Location: pulumi.String("us"),
		})
		if err != nil {
			return err
		}
		cryptoKey, err := kms.NewCryptoKey(ctx, "cryptoKey", &kms.CryptoKeyArgs{
			KeyRing: keyRing.ID(),
		})
		if err != nil {
			return err
		}
		_, err = bigquery.NewDataset(ctx, "dataset", &bigquery.DatasetArgs{
			DatasetId:                pulumi.String("example_dataset"),
			FriendlyName:             pulumi.String("test"),
			Description:              pulumi.String("This is a test description"),
			Location:                 pulumi.String("US"),
			DefaultTableExpirationMs: pulumi.Int(3600000),
			// New tables in this dataset are encrypted with the key above.
			DefaultEncryptionConfiguration: &bigquery.DatasetDefaultEncryptionConfigurationArgs{
				KmsKeyName: cryptoKey.ID(),
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}
// Example: encrypt a BigQuery dataset with a customer-managed KMS key (CMEK).
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.kms.KeyRing;
import com.pulumi.gcp.kms.KeyRingArgs;
import com.pulumi.gcp.kms.CryptoKey;
import com.pulumi.gcp.kms.CryptoKeyArgs;
import com.pulumi.gcp.bigquery.Dataset;
import com.pulumi.gcp.bigquery.DatasetArgs;
import com.pulumi.gcp.bigquery.inputs.DatasetDefaultEncryptionConfigurationArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        // Key ring and crypto key that will encrypt the dataset's tables.
        var keyRing = new KeyRing("keyRing", KeyRingArgs.builder()        
            .location("us")
            .build());

        var cryptoKey = new CryptoKey("cryptoKey", CryptoKeyArgs.builder()        
            .keyRing(keyRing.id())
            .build());

        var dataset = new Dataset("dataset", DatasetArgs.builder()        
            .datasetId("example_dataset")
            .friendlyName("test")
            .description("This is a test description")
            .location("US")
            // New tables default to expiring one hour (3,600,000 ms) after creation.
            .defaultTableExpirationMs(3600000)
            // New tables in this dataset are encrypted with the key above.
            .defaultEncryptionConfiguration(DatasetDefaultEncryptionConfigurationArgs.builder()
                .kmsKeyName(cryptoKey.id())
                .build())
            .build());

    }
}
import pulumi
import pulumi_gcp as gcp

# KMS key ring and crypto key used as the dataset's customer-managed key.
key_ring = gcp.kms.KeyRing(
    "keyRing",
    location="us",
)
crypto_key = gcp.kms.CryptoKey(
    "cryptoKey",
    key_ring=key_ring.id,
)

dataset = gcp.bigquery.Dataset(
    "dataset",
    dataset_id="example_dataset",
    friendly_name="test",
    description="This is a test description",
    location="US",
    default_table_expiration_ms=3600000,  # one hour
    default_encryption_configuration=gcp.bigquery.DatasetDefaultEncryptionConfigurationArgs(
        kms_key_name=crypto_key.id,
    ),
)
import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";

// KMS key ring and crypto key used as the dataset's customer-managed key.
const keyRing = new gcp.kms.KeyRing("keyRing", {
    location: "us",
});
const cryptoKey = new gcp.kms.CryptoKey("cryptoKey", {
    keyRing: keyRing.id,
});

const dataset = new gcp.bigquery.Dataset("dataset", {
    datasetId: "example_dataset",
    friendlyName: "test",
    description: "This is a test description",
    location: "US",
    defaultTableExpirationMs: 3600000, // one hour
    defaultEncryptionConfiguration: { kmsKeyName: cryptoKey.id },
});
resources:
  dataset:
    type: gcp:bigquery:Dataset
    properties:
      datasetId: example_dataset
      friendlyName: test
      description: This is a test description
      location: US
      # One hour, written as a plain integer: the field is integral, so avoid
      # the float literal (3.6e+06) the generator emitted.
      defaultTableExpirationMs: 3600000
      # New tables in this dataset are encrypted with the key below.
      defaultEncryptionConfiguration:
        kmsKeyName: ${cryptoKey.id}
  cryptoKey:
    type: gcp:kms:CryptoKey
    properties:
      keyRing: ${keyRing.id}
  keyRing:
    type: gcp:kms:KeyRing
    properties:
      location: us

Bigquery Dataset Authorized Dataset

// Example: grant an "authorized dataset" access entry, so the views of the
// public dataset can be authorized against the private dataset.
using System.Collections.Generic;
using Pulumi;
using Gcp = Pulumi.Gcp;

return await Deployment.RunAsync(() => 
{
    // Service account granted OWNER on both datasets.
    var bqowner = new Gcp.ServiceAccount.Account("bqowner", new()
    {
        AccountId = "bqowner",
    });

    var @public = new Gcp.BigQuery.Dataset("public", new()
    {
        DatasetId = "public",
        FriendlyName = "test",
        Description = "This dataset is public",
        Location = "EU",
        // New tables default to expiring one hour (3,600,000 ms) after creation.
        DefaultTableExpirationMs = 3600000,
        Labels = 
        {
            { "env", "default" },
        },
        Accesses = new[]
        {
            new Gcp.BigQuery.Inputs.DatasetAccessArgs
            {
                Role = "OWNER",
                UserByEmail = bqowner.Email,
            },
            new Gcp.BigQuery.Inputs.DatasetAccessArgs
            {
                Role = "READER",
                Domain = "hashicorp.com",
            },
        },
    });

    var dataset = new Gcp.BigQuery.Dataset("dataset", new()
    {
        DatasetId = "private",
        FriendlyName = "test",
        Description = "This dataset is private",
        Location = "EU",
        DefaultTableExpirationMs = 3600000,
        Labels = 
        {
            { "env", "default" },
        },
        Accesses = new[]
        {
            new Gcp.BigQuery.Inputs.DatasetAccessArgs
            {
                Role = "OWNER",
                UserByEmail = bqowner.Email,
            },
            new Gcp.BigQuery.Inputs.DatasetAccessArgs
            {
                Role = "READER",
                Domain = "hashicorp.com",
            },
            // Authorized-dataset entry referencing the public dataset.
            new Gcp.BigQuery.Inputs.DatasetAccessArgs
            {
                Dataset = new Gcp.BigQuery.Inputs.DatasetAccessDatasetArgs
                {
                    Dataset = new Gcp.BigQuery.Inputs.DatasetAccessDatasetDatasetArgs
                    {
                        ProjectId = @public.Project,
                        DatasetId = @public.DatasetId,
                    },
                    // Only the VIEWS of the public dataset are authorized.
                    TargetTypes = new[]
                    {
                        "VIEWS",
                    },
                },
            },
        },
    });

});
// Example: grant an "authorized dataset" access entry, so the views of the
// public dataset can be authorized against the private dataset.
package main

import (
	"github.com/pulumi/pulumi-gcp/sdk/v6/go/gcp/bigquery"
	"github.com/pulumi/pulumi-gcp/sdk/v6/go/gcp/serviceAccount"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		// Service account granted OWNER on both datasets.
		bqowner, err := serviceAccount.NewAccount(ctx, "bqowner", &serviceAccount.AccountArgs{
			AccountId: pulumi.String("bqowner"),
		})
		if err != nil {
			return err
		}
		public, err := bigquery.NewDataset(ctx, "public", &bigquery.DatasetArgs{
			DatasetId:                pulumi.String("public"),
			FriendlyName:             pulumi.String("test"),
			Description:              pulumi.String("This dataset is public"),
			Location:                 pulumi.String("EU"),
			DefaultTableExpirationMs: pulumi.Int(3600000),
			Labels: pulumi.StringMap{
				"env": pulumi.String("default"),
			},
			Accesses: bigquery.DatasetAccessTypeArray{
				&bigquery.DatasetAccessTypeArgs{
					Role:        pulumi.String("OWNER"),
					UserByEmail: bqowner.Email,
				},
				&bigquery.DatasetAccessTypeArgs{
					Role:   pulumi.String("READER"),
					Domain: pulumi.String("hashicorp.com"),
				},
			},
		})
		if err != nil {
			return err
		}
		_, err = bigquery.NewDataset(ctx, "dataset", &bigquery.DatasetArgs{
			DatasetId:                pulumi.String("private"),
			FriendlyName:             pulumi.String("test"),
			Description:              pulumi.String("This dataset is private"),
			Location:                 pulumi.String("EU"),
			DefaultTableExpirationMs: pulumi.Int(3600000),
			Labels: pulumi.StringMap{
				"env": pulumi.String("default"),
			},
			Accesses: bigquery.DatasetAccessTypeArray{
				&bigquery.DatasetAccessTypeArgs{
					Role:        pulumi.String("OWNER"),
					UserByEmail: bqowner.Email,
				},
				&bigquery.DatasetAccessTypeArgs{
					Role:   pulumi.String("READER"),
					Domain: pulumi.String("hashicorp.com"),
				},
				// Authorized-dataset entry referencing the public dataset;
				// only its VIEWS are authorized.
				&bigquery.DatasetAccessTypeArgs{
					Dataset: &bigquery.DatasetAccessDatasetArgs{
						Dataset: &bigquery.DatasetAccessDatasetDatasetArgs{
							ProjectId: public.Project,
							DatasetId: public.DatasetId,
						},
						TargetTypes: pulumi.StringArray{
							pulumi.String("VIEWS"),
						},
					},
				},
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}
// Example: grant an "authorized dataset" access entry, so the views of the
// public dataset can be authorized against the private dataset.
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.serviceAccount.Account;
import com.pulumi.gcp.serviceAccount.AccountArgs;
import com.pulumi.gcp.bigquery.Dataset;
import com.pulumi.gcp.bigquery.DatasetArgs;
import com.pulumi.gcp.bigquery.inputs.DatasetAccessArgs;
import com.pulumi.gcp.bigquery.inputs.DatasetAccessDatasetArgs;
import com.pulumi.gcp.bigquery.inputs.DatasetAccessDatasetDatasetArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        // Service account granted OWNER on both datasets.
        var bqowner = new Account("bqowner", AccountArgs.builder()        
            .accountId("bqowner")
            .build());

        var public_ = new Dataset("public", DatasetArgs.builder()        
            .datasetId("public")
            .friendlyName("test")
            .description("This dataset is public")
            .location("EU")
            .defaultTableExpirationMs(3600000)
            .labels(Map.of("env", "default"))
            .accesses(            
                DatasetAccessArgs.builder()
                    .role("OWNER")
                    .userByEmail(bqowner.email())
                    .build(),
                DatasetAccessArgs.builder()
                    .role("READER")
                    .domain("hashicorp.com")
                    .build())
            .build());

        var dataset = new Dataset("dataset", DatasetArgs.builder()        
            .datasetId("private")
            .friendlyName("test")
            .description("This dataset is private")
            .location("EU")
            .defaultTableExpirationMs(3600000)
            .labels(Map.of("env", "default"))
            .accesses(            
                DatasetAccessArgs.builder()
                    .role("OWNER")
                    .userByEmail(bqowner.email())
                    .build(),
                DatasetAccessArgs.builder()
                    .role("READER")
                    .domain("hashicorp.com")
                    .build(),
                // Authorized-dataset entry referencing the public dataset;
                // only its VIEWS are authorized.
                DatasetAccessArgs.builder()
                    .dataset(DatasetAccessDatasetArgs.builder()
                        .dataset(DatasetAccessDatasetDatasetArgs.builder()
                            .projectId(public_.project())
                            .datasetId(public_.datasetId())
                            .build())
                        .targetTypes("VIEWS")
                        .build())
                    .build())
            .build());

    }
}
import pulumi
import pulumi_gcp as gcp

# Service account granted OWNER on both datasets.
owner = gcp.service_account.Account("bqowner", account_id="bqowner")

# Access entries shared by both datasets: full control for the owner,
# read-only access for every user in the domain.
common_accesses = [
    gcp.bigquery.DatasetAccessArgs(role="OWNER", user_by_email=owner.email),
    gcp.bigquery.DatasetAccessArgs(role="READER", domain="hashicorp.com"),
]

public = gcp.bigquery.Dataset(
    "public",
    dataset_id="public",
    friendly_name="test",
    description="This dataset is public",
    location="EU",
    default_table_expiration_ms=3600000,
    labels={"env": "default"},
    accesses=common_accesses,
)

# The private dataset carries the same entries plus an authorized-dataset
# entry referencing the public dataset (only its VIEWS are authorized).
dataset = gcp.bigquery.Dataset(
    "dataset",
    dataset_id="private",
    friendly_name="test",
    description="This dataset is private",
    location="EU",
    default_table_expiration_ms=3600000,
    labels={"env": "default"},
    accesses=common_accesses + [
        gcp.bigquery.DatasetAccessArgs(
            dataset=gcp.bigquery.DatasetAccessDatasetArgs(
                dataset=gcp.bigquery.DatasetAccessDatasetDatasetArgs(
                    project_id=public.project,
                    dataset_id=public.dataset_id,
                ),
                target_types=["VIEWS"],
            ),
        ),
    ],
)
import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";

// Service account granted OWNER on both datasets.
const owner = new gcp.serviceaccount.Account("bqowner", { accountId: "bqowner" });

// Access entries shared by both datasets: full control for the owner,
// read-only access for every user in the domain.
const commonAccesses = [
    { role: "OWNER", userByEmail: owner.email },
    { role: "READER", domain: "hashicorp.com" },
];

const publicDataset = new gcp.bigquery.Dataset("public", {
    datasetId: "public",
    friendlyName: "test",
    description: "This dataset is public",
    location: "EU",
    defaultTableExpirationMs: 3600000,
    labels: { env: "default" },
    accesses: commonAccesses,
});

// The private dataset carries the same entries plus an authorized-dataset
// entry referencing the public dataset (only its VIEWS are authorized).
const dataset = new gcp.bigquery.Dataset("dataset", {
    datasetId: "private",
    friendlyName: "test",
    description: "This dataset is private",
    location: "EU",
    defaultTableExpirationMs: 3600000,
    labels: { env: "default" },
    accesses: [
        ...commonAccesses,
        {
            dataset: {
                dataset: {
                    projectId: publicDataset.project,
                    datasetId: publicDataset.datasetId,
                },
                targetTypes: ["VIEWS"],
            },
        },
    ],
});
resources:
  public:
    type: gcp:bigquery:Dataset
    properties:
      datasetId: public
      friendlyName: test
      description: This dataset is public
      location: EU
      # One hour, written as a plain integer: the field is integral, so avoid
      # the float literal (3.6e+06) the generator emitted.
      defaultTableExpirationMs: 3600000
      labels:
        env: default
      accesses:
        - role: OWNER
          userByEmail: ${bqowner.email}
        - role: READER
          domain: hashicorp.com
  dataset:
    type: gcp:bigquery:Dataset
    properties:
      datasetId: private
      friendlyName: test
      description: This dataset is private
      location: EU
      defaultTableExpirationMs: 3600000
      labels:
        env: default
      accesses:
        - role: OWNER
          userByEmail: ${bqowner.email}
        - role: READER
          domain: hashicorp.com
        # Authorized-dataset entry referencing the public dataset;
        # only its VIEWS are authorized.
        - dataset:
            dataset:
              projectId: ${public.project}
              datasetId: ${public.datasetId}
            targetTypes:
              - VIEWS
  bqowner:
    type: gcp:serviceAccount:Account
    properties:
      accountId: bqowner

Bigquery Dataset Authorized Routine

// Example: authorize a table-valued function from a public dataset inside a
// private dataset via an authorized-routine access entry.
using System.Collections.Generic;
using System.Text.Json;
using Pulumi;
using Gcp = Pulumi.Gcp;

return await Deployment.RunAsync(() => 
{
    // Public dataset holding the routine that will be authorized.
    var publicDataset = new Gcp.BigQuery.Dataset("publicDataset", new()
    {
        DatasetId = "public_dataset",
        Description = "This dataset is public",
    });

    // SQL table-valued function that adds 1 to its INT64 input.
    var publicRoutine = new Gcp.BigQuery.Routine("publicRoutine", new()
    {
        DatasetId = publicDataset.DatasetId,
        RoutineId = "public_routine",
        RoutineType = "TABLE_VALUED_FUNCTION",
        Language = "SQL",
        DefinitionBody = @"SELECT 1 + value AS value
",
        Arguments = new[]
        {
            new Gcp.BigQuery.Inputs.RoutineArgumentArgs
            {
                Name = "value",
                ArgumentKind = "FIXED_TYPE",
                // BigQuery data types are passed as JSON-encoded strings.
                DataType = JsonSerializer.Serialize(new Dictionary<string, object?>
                {
                    ["typeKind"] = "INT64",
                }),
            },
        },
        ReturnTableType = JsonSerializer.Serialize(new Dictionary<string, object?>
        {
            ["columns"] = new[]
            {
                new Dictionary<string, object?>
                {
                    ["name"] = "value",
                    ["type"] = new Dictionary<string, object?>
                    {
                        ["typeKind"] = "INT64",
                    },
                },
            },
        }),
    });

    // Private dataset that authorizes the routine above.
    var @private = new Gcp.BigQuery.Dataset("private", new()
    {
        DatasetId = "private_dataset",
        Description = "This dataset is private",
        Accesses = new[]
        {
            new Gcp.BigQuery.Inputs.DatasetAccessArgs
            {
                Role = "OWNER",
                // NOTE(review): the "emailAddress:" prefix looks like IAM member
                // syntax; UserByEmail normally takes a bare email — confirm.
                UserByEmail = "emailAddress:my@service-account.com",
            },
            new Gcp.BigQuery.Inputs.DatasetAccessArgs
            {
                Routine = new Gcp.BigQuery.Inputs.DatasetAccessRoutineArgs
                {
                    ProjectId = publicRoutine.Project,
                    DatasetId = publicRoutine.DatasetId,
                    RoutineId = publicRoutine.RoutineId,
                },
            },
        },
    });

});
// Example: authorize a table-valued function from a public dataset inside a
// private dataset via an authorized-routine access entry.
package main

import (
	"encoding/json"

	"github.com/pulumi/pulumi-gcp/sdk/v6/go/gcp/bigquery"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		// Public dataset holding the routine that will be authorized.
		publicDataset, err := bigquery.NewDataset(ctx, "publicDataset", &bigquery.DatasetArgs{
			DatasetId:   pulumi.String("public_dataset"),
			Description: pulumi.String("This dataset is public"),
		})
		if err != nil {
			return err
		}
		// BigQuery data types are passed as JSON-encoded strings.
		tmpJSON0, err := json.Marshal(map[string]interface{}{
			"typeKind": "INT64",
		})
		if err != nil {
			return err
		}
		json0 := string(tmpJSON0)
		tmpJSON1, err := json.Marshal(map[string]interface{}{
			"columns": []map[string]interface{}{
				map[string]interface{}{
					"name": "value",
					"type": map[string]interface{}{
						"typeKind": "INT64",
					},
				},
			},
		})
		if err != nil {
			return err
		}
		json1 := string(tmpJSON1)
		// SQL table-valued function that adds 1 to its INT64 input.
		publicRoutine, err := bigquery.NewRoutine(ctx, "publicRoutine", &bigquery.RoutineArgs{
			DatasetId:      publicDataset.DatasetId,
			RoutineId:      pulumi.String("public_routine"),
			RoutineType:    pulumi.String("TABLE_VALUED_FUNCTION"),
			Language:       pulumi.String("SQL"),
			DefinitionBody: pulumi.String("SELECT 1 + value AS value\n"),
			Arguments: bigquery.RoutineArgumentArray{
				&bigquery.RoutineArgumentArgs{
					Name:         pulumi.String("value"),
					ArgumentKind: pulumi.String("FIXED_TYPE"),
					DataType:     pulumi.String(json0),
				},
			},
			ReturnTableType: pulumi.String(json1),
		})
		if err != nil {
			return err
		}
		// Private dataset that authorizes the routine above.
		_, err = bigquery.NewDataset(ctx, "private", &bigquery.DatasetArgs{
			DatasetId:   pulumi.String("private_dataset"),
			Description: pulumi.String("This dataset is private"),
			Accesses: bigquery.DatasetAccessTypeArray{
				// NOTE(review): the "emailAddress:" prefix looks like IAM member
				// syntax; UserByEmail normally takes a bare email — confirm.
				&bigquery.DatasetAccessTypeArgs{
					Role:        pulumi.String("OWNER"),
					UserByEmail: pulumi.String("emailAddress:my@service-account.com"),
				},
				&bigquery.DatasetAccessTypeArgs{
					Routine: &bigquery.DatasetAccessRoutineArgs{
						ProjectId: publicRoutine.Project,
						DatasetId: publicRoutine.DatasetId,
						RoutineId: publicRoutine.RoutineId,
					},
				},
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}
// Example: authorize a table-valued function from a public dataset inside a
// private dataset via an authorized-routine access entry.
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.bigquery.Dataset;
import com.pulumi.gcp.bigquery.DatasetArgs;
import com.pulumi.gcp.bigquery.Routine;
import com.pulumi.gcp.bigquery.RoutineArgs;
import com.pulumi.gcp.bigquery.inputs.RoutineArgumentArgs;
import com.pulumi.gcp.bigquery.inputs.DatasetAccessArgs;
import com.pulumi.gcp.bigquery.inputs.DatasetAccessRoutineArgs;
import static com.pulumi.codegen.internal.Serialization.*;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        // Public dataset holding the routine that will be authorized.
        var publicDataset = new Dataset("publicDataset", DatasetArgs.builder()        
            .datasetId("public_dataset")
            .description("This dataset is public")
            .build());

        // SQL table-valued function that adds 1 to its INT64 input.
        var publicRoutine = new Routine("publicRoutine", RoutineArgs.builder()        
            .datasetId(publicDataset.datasetId())
            .routineId("public_routine")
            .routineType("TABLE_VALUED_FUNCTION")
            .language("SQL")
            .definitionBody("""
SELECT 1 + value AS value
            """)
            .arguments(RoutineArgumentArgs.builder()
                .name("value")
                .argumentKind("FIXED_TYPE")
                // BigQuery data types are passed as JSON-encoded strings.
                .dataType(serializeJson(
                    jsonObject(
                        jsonProperty("typeKind", "INT64")
                    )))
                .build())
            .returnTableType(serializeJson(
                jsonObject(
                    jsonProperty("columns", jsonArray(jsonObject(
                        jsonProperty("name", "value"),
                        jsonProperty("type", jsonObject(
                            jsonProperty("typeKind", "INT64")
                        ))
                    )))
                )))
            .build());

        // Private dataset that authorizes the routine above.
        var private_ = new Dataset("private", DatasetArgs.builder()        
            .datasetId("private_dataset")
            .description("This dataset is private")
            .accesses(            
                DatasetAccessArgs.builder()
                    .role("OWNER")
                    // NOTE(review): the "emailAddress:" prefix looks like IAM member
                    // syntax; userByEmail normally takes a bare email — confirm.
                    .userByEmail("emailAddress:my@service-account.com")
                    .build(),
                DatasetAccessArgs.builder()
                    .routine(DatasetAccessRoutineArgs.builder()
                        .projectId(publicRoutine.project())
                        .datasetId(publicRoutine.datasetId())
                        .routineId(publicRoutine.routineId())
                        .build())
                    .build())
            .build());

    }
}
import pulumi
import json
import pulumi_gcp as gcp

# Public dataset holding the routine that will be authorized.
public_dataset = gcp.bigquery.Dataset(
    "publicDataset",
    dataset_id="public_dataset",
    description="This dataset is public",
)

# BigQuery data types are passed as JSON-encoded strings; INT64 is used both
# for the argument and for the returned column.
int64_type = json.dumps({"typeKind": "INT64"})

# SQL table-valued function that adds 1 to its INT64 input.
public_routine = gcp.bigquery.Routine(
    "publicRoutine",
    dataset_id=public_dataset.dataset_id,
    routine_id="public_routine",
    routine_type="TABLE_VALUED_FUNCTION",
    language="SQL",
    definition_body="SELECT 1 + value AS value\n",
    arguments=[gcp.bigquery.RoutineArgumentArgs(
        name="value",
        argument_kind="FIXED_TYPE",
        data_type=int64_type,
    )],
    return_table_type=json.dumps({
        "columns": [{"name": "value", "type": {"typeKind": "INT64"}}],
    }),
)

# Private dataset that authorizes the routine above.
private = gcp.bigquery.Dataset(
    "private",
    dataset_id="private_dataset",
    description="This dataset is private",
    accesses=[
        gcp.bigquery.DatasetAccessArgs(
            role="OWNER",
            # NOTE(review): the "emailAddress:" prefix looks like IAM member
            # syntax; user_by_email normally takes a bare email — confirm.
            user_by_email="emailAddress:my@service-account.com",
        ),
        gcp.bigquery.DatasetAccessArgs(
            routine=gcp.bigquery.DatasetAccessRoutineArgs(
                project_id=public_routine.project,
                dataset_id=public_routine.dataset_id,
                routine_id=public_routine.routine_id,
            ),
        ),
    ],
)
import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";

// BigQuery data types are passed as JSON-encoded strings; INT64 is used both
// for the argument and for the returned column.
const int64Type = JSON.stringify({ typeKind: "INT64" });

// Public dataset holding the routine that will be authorized.
const publicDataset = new gcp.bigquery.Dataset("publicDataset", {
    datasetId: "public_dataset",
    description: "This dataset is public",
});

// SQL table-valued function that adds 1 to its INT64 input.
const publicRoutine = new gcp.bigquery.Routine("publicRoutine", {
    datasetId: publicDataset.datasetId,
    routineId: "public_routine",
    routineType: "TABLE_VALUED_FUNCTION",
    language: "SQL",
    definitionBody: "SELECT 1 + value AS value\n",
    arguments: [
        { name: "value", argumentKind: "FIXED_TYPE", dataType: int64Type },
    ],
    returnTableType: JSON.stringify({
        columns: [{ name: "value", type: { typeKind: "INT64" } }],
    }),
});

// Private dataset that authorizes the routine above.
const privateDataset = new gcp.bigquery.Dataset("private", {
    datasetId: "private_dataset",
    description: "This dataset is private",
    accesses: [
        {
            role: "OWNER",
            // NOTE(review): the "emailAddress:" prefix looks like IAM member
            // syntax; userByEmail normally takes a bare email — confirm.
            userByEmail: "emailAddress:my@service-account.com",
        },
        {
            routine: {
                projectId: publicRoutine.project,
                datasetId: publicRoutine.datasetId,
                routineId: publicRoutine.routineId,
            },
        },
    ],
});
resources:
  # Public dataset holding the routine that will be authorized.
  publicDataset:
    type: gcp:bigquery:Dataset
    properties:
      datasetId: public_dataset
      description: This dataset is public
  # SQL table-valued function that adds 1 to its INT64 input.
  publicRoutine:
    type: gcp:bigquery:Routine
    properties:
      datasetId: ${publicDataset.datasetId}
      routineId: public_routine
      routineType: TABLE_VALUED_FUNCTION
      language: SQL
      definitionBody: |
                SELECT 1 + value AS value
      arguments:
        - name: value
          argumentKind: FIXED_TYPE
          # BigQuery data types are passed as JSON-encoded strings.
          dataType:
            fn::toJSON:
              typeKind: INT64
      returnTableType:
        fn::toJSON:
          columns:
            - name: value
              type:
                typeKind: INT64
  # Private dataset that authorizes the routine above.
  private:
    type: gcp:bigquery:Dataset
    properties:
      datasetId: private_dataset
      description: This dataset is private
      accesses:
        # NOTE(review): the "emailAddress:" prefix looks like IAM member
        # syntax; userByEmail normally takes a bare email — confirm.
        - role: OWNER
          userByEmail: emailAddress:my@service-account.com
        - routine:
            projectId: ${publicRoutine.project}
            datasetId: ${publicRoutine.datasetId}
            routineId: ${publicRoutine.routineId}

Create Dataset Resource

new Dataset(name: string, args: DatasetArgs, opts?: CustomResourceOptions);
@overload
def Dataset(resource_name: str,
            opts: Optional[ResourceOptions] = None,
            accesses: Optional[Sequence[DatasetAccessArgs]] = None,
            dataset_id: Optional[str] = None,
            default_encryption_configuration: Optional[DatasetDefaultEncryptionConfigurationArgs] = None,
            default_partition_expiration_ms: Optional[int] = None,
            default_table_expiration_ms: Optional[int] = None,
            delete_contents_on_destroy: Optional[bool] = None,
            description: Optional[str] = None,
            friendly_name: Optional[str] = None,
            labels: Optional[Mapping[str, str]] = None,
            location: Optional[str] = None,
            max_time_travel_hours: Optional[str] = None,
            project: Optional[str] = None)
@overload
def Dataset(resource_name: str,
            args: DatasetArgs,
            opts: Optional[ResourceOptions] = None)
func NewDataset(ctx *Context, name string, args DatasetArgs, opts ...ResourceOption) (*Dataset, error)
public Dataset(string name, DatasetArgs args, CustomResourceOptions? opts = null)
public Dataset(String name, DatasetArgs args)
public Dataset(String name, DatasetArgs args, CustomResourceOptions options)
type: gcp:bigquery:Dataset
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.

name string
The unique name of the resource.
args DatasetArgs
The arguments to resource properties.
opts CustomResourceOptions
Bag of options to control resource's behavior.
resource_name str
The unique name of the resource.
args DatasetArgs
The arguments to resource properties.
opts ResourceOptions
Bag of options to control resource's behavior.
ctx Context
Context object for the current deployment.
name string
The unique name of the resource.
args DatasetArgs
The arguments to resource properties.
opts ResourceOption
Bag of options to control resource's behavior.
name string
The unique name of the resource.
args DatasetArgs
The arguments to resource properties.
opts CustomResourceOptions
Bag of options to control resource's behavior.
name String
The unique name of the resource.
args DatasetArgs
The arguments to resource properties.
options CustomResourceOptions
Bag of options to control resource's behavior.

Dataset Resource Properties

To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

Inputs

The Dataset resource accepts the following input properties:

DatasetId string

A unique ID for this dataset, without the project name. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters.

Accesses List<DatasetAccessArgs>

An array of objects that define dataset access for one or more entities. Structure is documented below.

DefaultEncryptionConfiguration DatasetDefaultEncryptionConfigurationArgs

The default encryption key for all tables in the dataset. Once this property is set, all newly-created partitioned tables in the dataset will have encryption key set to this value, unless table creation request (or query) overrides the key. Structure is documented below.

DefaultPartitionExpirationMs int

The default partition expiration for all partitioned tables in the dataset, in milliseconds.

DefaultTableExpirationMs int

The default lifetime of all tables in the dataset, in milliseconds. The minimum value is 3600000 milliseconds (one hour).

DeleteContentsOnDestroy bool

If set to true, delete all the tables in the dataset when destroying the resource; otherwise, destroying the resource will fail if tables are present.

Description string

A user-friendly description of the dataset

FriendlyName string

A descriptive name for the dataset

Labels Dictionary<string, string>

The labels associated with this dataset. You can use these to organize and group your datasets

Location string

The geographic location where the dataset should reside. See official docs.

MaxTimeTravelHours string

Defines the time travel window in hours. The value can be from 48 to 168 hours (2 to 7 days).

Project string

The ID of the project in which the resource belongs. If it is not provided, the provider project is used.

DatasetId string

A unique ID for this dataset, without the project name. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters.

Accesses []DatasetAccessTypeArgs

An array of objects that define dataset access for one or more entities. Structure is documented below.

DefaultEncryptionConfiguration DatasetDefaultEncryptionConfigurationArgs

The default encryption key for all tables in the dataset. Once this property is set, all newly-created partitioned tables in the dataset will have encryption key set to this value, unless table creation request (or query) overrides the key. Structure is documented below.

DefaultPartitionExpirationMs int

The default partition expiration for all partitioned tables in the dataset, in milliseconds.

DefaultTableExpirationMs int

The default lifetime of all tables in the dataset, in milliseconds. The minimum value is 3600000 milliseconds (one hour).

DeleteContentsOnDestroy bool

If set to true, delete all the tables in the dataset when destroying the resource; otherwise, destroying the resource will fail if tables are present.

Description string

A user-friendly description of the dataset

FriendlyName string

A descriptive name for the dataset

Labels map[string]string

The labels associated with this dataset. You can use these to organize and group your datasets

Location string

The geographic location where the dataset should reside. See official docs.

MaxTimeTravelHours string

Defines the time travel window in hours. The value can be from 48 to 168 hours (2 to 7 days).

Project string

The ID of the project in which the resource belongs. If it is not provided, the provider project is used.

datasetId String

A unique ID for this dataset, without the project name. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters.

accesses List<DatasetAccessArgs>

An array of objects that define dataset access for one or more entities. Structure is documented below.

defaultEncryptionConfiguration DatasetDefaultEncryptionConfigurationArgs

The default encryption key for all tables in the dataset. Once this property is set, all newly-created partitioned tables in the dataset will have encryption key set to this value, unless table creation request (or query) overrides the key. Structure is documented below.

defaultPartitionExpirationMs Integer

The default partition expiration for all partitioned tables in the dataset, in milliseconds.

defaultTableExpirationMs Integer

The default lifetime of all tables in the dataset, in milliseconds. The minimum value is 3600000 milliseconds (one hour).

deleteContentsOnDestroy Boolean

If set to true, delete all the tables in the dataset when destroying the resource; otherwise, destroying the resource will fail if tables are present.

description String

A user-friendly description of the dataset

friendlyName String

A descriptive name for the dataset

labels Map<String,String>

The labels associated with this dataset. You can use these to organize and group your datasets

location String

The geographic location where the dataset should reside. See official docs.

maxTimeTravelHours String

Defines the time travel window in hours. The value can be from 48 to 168 hours (2 to 7 days).

project String

The ID of the project in which the resource belongs. If it is not provided, the provider project is used.

datasetId string

A unique ID for this dataset, without the project name. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters.

accesses DatasetAccessArgs[]

An array of objects that define dataset access for one or more entities. Structure is documented below.

defaultEncryptionConfiguration DatasetDefaultEncryptionConfigurationArgs

The default encryption key for all tables in the dataset. Once this property is set, all newly-created partitioned tables in the dataset will have encryption key set to this value, unless table creation request (or query) overrides the key. Structure is documented below.

defaultPartitionExpirationMs number

The default partition expiration for all partitioned tables in the dataset, in milliseconds.

defaultTableExpirationMs number

The default lifetime of all tables in the dataset, in milliseconds. The minimum value is 3600000 milliseconds (one hour).

deleteContentsOnDestroy boolean

If set to true, delete all the tables in the dataset when destroying the resource; otherwise, destroying the resource will fail if tables are present.

description string

A user-friendly description of the dataset

friendlyName string

A descriptive name for the dataset

labels {[key: string]: string}

The labels associated with this dataset. You can use these to organize and group your datasets

location string

The geographic location where the dataset should reside. See official docs.

maxTimeTravelHours string

Defines the time travel window in hours. The value can be from 48 to 168 hours (2 to 7 days).

project string

The ID of the project in which the resource belongs. If it is not provided, the provider project is used.

dataset_id str

A unique ID for this dataset, without the project name. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters.

accesses Sequence[DatasetAccessArgs]

An array of objects that define dataset access for one or more entities. Structure is documented below.

default_encryption_configuration DatasetDefaultEncryptionConfigurationArgs

The default encryption key for all tables in the dataset. Once this property is set, all newly-created partitioned tables in the dataset will have encryption key set to this value, unless table creation request (or query) overrides the key. Structure is documented below.

default_partition_expiration_ms int

The default partition expiration for all partitioned tables in the dataset, in milliseconds.

default_table_expiration_ms int

The default lifetime of all tables in the dataset, in milliseconds. The minimum value is 3600000 milliseconds (one hour).

delete_contents_on_destroy bool

If set to true, delete all the tables in the dataset when destroying the resource; otherwise, destroying the resource will fail if tables are present.

description str

A user-friendly description of the dataset

friendly_name str

A descriptive name for the dataset

labels Mapping[str, str]

The labels associated with this dataset. You can use these to organize and group your datasets

location str

The geographic location where the dataset should reside. See official docs.

max_time_travel_hours str

Defines the time travel window in hours. The value can be from 48 to 168 hours (2 to 7 days).

project str

The ID of the project in which the resource belongs. If it is not provided, the provider project is used.

datasetId String

A unique ID for this dataset, without the project name. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters.

accesses List<Property Map>

An array of objects that define dataset access for one or more entities. Structure is documented below.

defaultEncryptionConfiguration Property Map

The default encryption key for all tables in the dataset. Once this property is set, all newly-created partitioned tables in the dataset will have encryption key set to this value, unless table creation request (or query) overrides the key. Structure is documented below.

defaultPartitionExpirationMs Number

The default partition expiration for all partitioned tables in the dataset, in milliseconds.

defaultTableExpirationMs Number

The default lifetime of all tables in the dataset, in milliseconds. The minimum value is 3600000 milliseconds (one hour).

deleteContentsOnDestroy Boolean

If set to true, delete all the tables in the dataset when destroying the resource; otherwise, destroying the resource will fail if tables are present.

description String

A user-friendly description of the dataset

friendlyName String

A descriptive name for the dataset

labels Map<String>

The labels associated with this dataset. You can use these to organize and group your datasets

location String

The geographic location where the dataset should reside. See official docs.

maxTimeTravelHours String

Defines the time travel window in hours. The value can be from 48 to 168 hours (2 to 7 days).

project String

The ID of the project in which the resource belongs. If it is not provided, the provider project is used.

Outputs

All input properties are implicitly available as output properties. Additionally, the Dataset resource produces the following output properties:

CreationTime int

The time when this dataset was created, in milliseconds since the epoch.

Etag string

A hash of the resource.

Id string

The provider-assigned unique ID for this managed resource.

LastModifiedTime int

The date when this dataset or any of its tables was last modified, in milliseconds since the epoch.

SelfLink string

The URI of the created resource.

CreationTime int

The time when this dataset was created, in milliseconds since the epoch.

Etag string

A hash of the resource.

Id string

The provider-assigned unique ID for this managed resource.

LastModifiedTime int

The date when this dataset or any of its tables was last modified, in milliseconds since the epoch.

SelfLink string

The URI of the created resource.

creationTime Integer

The time when this dataset was created, in milliseconds since the epoch.

etag String

A hash of the resource.

id String

The provider-assigned unique ID for this managed resource.

lastModifiedTime Integer

The date when this dataset or any of its tables was last modified, in milliseconds since the epoch.

selfLink String

The URI of the created resource.

creationTime number

The time when this dataset was created, in milliseconds since the epoch.

etag string

A hash of the resource.

id string

The provider-assigned unique ID for this managed resource.

lastModifiedTime number

The date when this dataset or any of its tables was last modified, in milliseconds since the epoch.

selfLink string

The URI of the created resource.

creation_time int

The time when this dataset was created, in milliseconds since the epoch.

etag str

A hash of the resource.

id str

The provider-assigned unique ID for this managed resource.

last_modified_time int

The date when this dataset or any of its tables was last modified, in milliseconds since the epoch.

self_link str

The URI of the created resource.

creationTime Number

The time when this dataset was created, in milliseconds since the epoch.

etag String

A hash of the resource.

id String

The provider-assigned unique ID for this managed resource.

lastModifiedTime Number

The date when this dataset or any of its tables was last modified, in milliseconds since the epoch.

selfLink String

The URI of the created resource.

Look up Existing Dataset Resource

Get an existing Dataset resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

public static get(name: string, id: Input<ID>, state?: DatasetState, opts?: CustomResourceOptions): Dataset
@staticmethod
def get(resource_name: str,
        id: str,
        opts: Optional[ResourceOptions] = None,
        accesses: Optional[Sequence[DatasetAccessArgs]] = None,
        creation_time: Optional[int] = None,
        dataset_id: Optional[str] = None,
        default_encryption_configuration: Optional[DatasetDefaultEncryptionConfigurationArgs] = None,
        default_partition_expiration_ms: Optional[int] = None,
        default_table_expiration_ms: Optional[int] = None,
        delete_contents_on_destroy: Optional[bool] = None,
        description: Optional[str] = None,
        etag: Optional[str] = None,
        friendly_name: Optional[str] = None,
        labels: Optional[Mapping[str, str]] = None,
        last_modified_time: Optional[int] = None,
        location: Optional[str] = None,
        max_time_travel_hours: Optional[str] = None,
        project: Optional[str] = None,
        self_link: Optional[str] = None) -> Dataset
func GetDataset(ctx *Context, name string, id IDInput, state *DatasetState, opts ...ResourceOption) (*Dataset, error)
public static Dataset Get(string name, Input<string> id, DatasetState? state, CustomResourceOptions? opts = null)
public static Dataset get(String name, Output<String> id, DatasetState state, CustomResourceOptions options)
Resource lookup is not supported in YAML
name
The unique name of the resulting resource.
id
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
resource_name
The unique name of the resulting resource.
id
The unique provider ID of the resource to lookup.
name
The unique name of the resulting resource.
id
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
name
The unique name of the resulting resource.
id
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
name
The unique name of the resulting resource.
id
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
The following state arguments are supported:
Accesses List<DatasetAccessArgs>

An array of objects that define dataset access for one or more entities. Structure is documented below.

CreationTime int

The time when this dataset was created, in milliseconds since the epoch.

DatasetId string

A unique ID for this dataset, without the project name. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters.

DefaultEncryptionConfiguration DatasetDefaultEncryptionConfigurationArgs

The default encryption key for all tables in the dataset. Once this property is set, all newly-created partitioned tables in the dataset will have encryption key set to this value, unless table creation request (or query) overrides the key. Structure is documented below.

DefaultPartitionExpirationMs int

The default partition expiration for all partitioned tables in the dataset, in milliseconds.

DefaultTableExpirationMs int

The default lifetime of all tables in the dataset, in milliseconds. The minimum value is 3600000 milliseconds (one hour).

DeleteContentsOnDestroy bool

If set to true, delete all the tables in the dataset when destroying the resource; otherwise, destroying the resource will fail if tables are present.

Description string

A user-friendly description of the dataset

Etag string

A hash of the resource.

FriendlyName string

A descriptive name for the dataset

Labels Dictionary<string, string>

The labels associated with this dataset. You can use these to organize and group your datasets

LastModifiedTime int

The date when this dataset or any of its tables was last modified, in milliseconds since the epoch.

Location string

The geographic location where the dataset should reside. See official docs.

MaxTimeTravelHours string

Defines the time travel window in hours. The value can be from 48 to 168 hours (2 to 7 days).

Project string

The ID of the project in which the resource belongs. If it is not provided, the provider project is used.

SelfLink string

The URI of the created resource.

Accesses []DatasetAccessTypeArgs

An array of objects that define dataset access for one or more entities. Structure is documented below.

CreationTime int

The time when this dataset was created, in milliseconds since the epoch.

DatasetId string

A unique ID for this dataset, without the project name. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters.

DefaultEncryptionConfiguration DatasetDefaultEncryptionConfigurationArgs

The default encryption key for all tables in the dataset. Once this property is set, all newly-created partitioned tables in the dataset will have encryption key set to this value, unless table creation request (or query) overrides the key. Structure is documented below.

DefaultPartitionExpirationMs int

The default partition expiration for all partitioned tables in the dataset, in milliseconds.

DefaultTableExpirationMs int

The default lifetime of all tables in the dataset, in milliseconds. The minimum value is 3600000 milliseconds (one hour).

DeleteContentsOnDestroy bool

If set to true, delete all the tables in the dataset when destroying the resource; otherwise, destroying the resource will fail if tables are present.

Description string

A user-friendly description of the dataset

Etag string

A hash of the resource.

FriendlyName string

A descriptive name for the dataset

Labels map[string]string

The labels associated with this dataset. You can use these to organize and group your datasets

LastModifiedTime int

The date when this dataset or any of its tables was last modified, in milliseconds since the epoch.

Location string

The geographic location where the dataset should reside. See official docs.

MaxTimeTravelHours string

Defines the time travel window in hours. The value can be from 48 to 168 hours (2 to 7 days).

Project string

The ID of the project in which the resource belongs. If it is not provided, the provider project is used.

SelfLink string

The URI of the created resource.

accesses List<DatasetAccessArgs>

An array of objects that define dataset access for one or more entities. Structure is documented below.

creationTime Integer

The time when this dataset was created, in milliseconds since the epoch.

datasetId String

A unique ID for this dataset, without the project name. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters.

defaultEncryptionConfiguration DatasetDefaultEncryptionConfigurationArgs

The default encryption key for all tables in the dataset. Once this property is set, all newly-created partitioned tables in the dataset will have encryption key set to this value, unless table creation request (or query) overrides the key. Structure is documented below.

defaultPartitionExpirationMs Integer

The default partition expiration for all partitioned tables in the dataset, in milliseconds.

defaultTableExpirationMs Integer

The default lifetime of all tables in the dataset, in milliseconds. The minimum value is 3600000 milliseconds (one hour).

deleteContentsOnDestroy Boolean

If set to true, delete all the tables in the dataset when destroying the resource; otherwise, destroying the resource will fail if tables are present.

description String

A user-friendly description of the dataset

etag String

A hash of the resource.

friendlyName String

A descriptive name for the dataset

labels Map<String,String>

The labels associated with this dataset. You can use these to organize and group your datasets

lastModifiedTime Integer

The date when this dataset or any of its tables was last modified, in milliseconds since the epoch.

location String

The geographic location where the dataset should reside. See official docs.

maxTimeTravelHours String

Defines the time travel window in hours. The value can be from 48 to 168 hours (2 to 7 days).

project String

The ID of the project in which the resource belongs. If it is not provided, the provider project is used.

selfLink String

The URI of the created resource.

accesses DatasetAccessArgs[]

An array of objects that define dataset access for one or more entities. Structure is documented below.

creationTime number

The time when this dataset was created, in milliseconds since the epoch.

datasetId string

A unique ID for this dataset, without the project name. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters.

defaultEncryptionConfiguration DatasetDefaultEncryptionConfigurationArgs

The default encryption key for all tables in the dataset. Once this property is set, all newly-created partitioned tables in the dataset will have encryption key set to this value, unless table creation request (or query) overrides the key. Structure is documented below.

defaultPartitionExpirationMs number

The default partition expiration for all partitioned tables in the dataset, in milliseconds.

defaultTableExpirationMs number

The default lifetime of all tables in the dataset, in milliseconds. The minimum value is 3600000 milliseconds (one hour).

deleteContentsOnDestroy boolean

If set to true, delete all the tables in the dataset when destroying the resource; otherwise, destroying the resource will fail if tables are present.

description string

A user-friendly description of the dataset

etag string

A hash of the resource.

friendlyName string

A descriptive name for the dataset

labels {[key: string]: string}

The labels associated with this dataset. You can use these to organize and group your datasets

lastModifiedTime number

The date when this dataset or any of its tables was last modified, in milliseconds since the epoch.

location string

The geographic location where the dataset should reside. See official docs.

maxTimeTravelHours string

Defines the time travel window in hours. The value can be from 48 to 168 hours (2 to 7 days).

project string

The ID of the project in which the resource belongs. If it is not provided, the provider project is used.

selfLink string

The URI of the created resource.

accesses Sequence[DatasetAccessArgs]

An array of objects that define dataset access for one or more entities. Structure is documented below.

creation_time int

The time when this dataset was created, in milliseconds since the epoch.

dataset_id str

A unique ID for this dataset, without the project name. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters.

default_encryption_configuration DatasetDefaultEncryptionConfigurationArgs

The default encryption key for all tables in the dataset. Once this property is set, all newly-created partitioned tables in the dataset will have encryption key set to this value, unless table creation request (or query) overrides the key. Structure is documented below.

default_partition_expiration_ms int

The default partition expiration for all partitioned tables in the dataset, in milliseconds.

default_table_expiration_ms int

The default lifetime of all tables in the dataset, in milliseconds. The minimum value is 3600000 milliseconds (one hour).

delete_contents_on_destroy bool

If set to true, delete all the tables in the dataset when destroying the resource; otherwise, destroying the resource will fail if tables are present.

description str

A user-friendly description of the dataset

etag str

A hash of the resource.

friendly_name str

A descriptive name for the dataset

labels Mapping[str, str]

The labels associated with this dataset. You can use these to organize and group your datasets

last_modified_time int

The date when this dataset or any of its tables was last modified, in milliseconds since the epoch.

location str

The geographic location where the dataset should reside. See official docs.

max_time_travel_hours str

Defines the time travel window in hours. The value can be from 48 to 168 hours (2 to 7 days).

project str

The ID of the project in which the resource belongs. If it is not provided, the provider project is used.

self_link str

The URI of the created resource.

accesses List<Property Map>

An array of objects that define dataset access for one or more entities. Structure is documented below.

creationTime Number

The time when this dataset was created, in milliseconds since the epoch.

datasetId String

A unique ID for this dataset, without the project name. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters.

defaultEncryptionConfiguration Property Map

The default encryption key for all tables in the dataset. Once this property is set, all newly-created partitioned tables in the dataset will have encryption key set to this value, unless table creation request (or query) overrides the key. Structure is documented below.

defaultPartitionExpirationMs Number

The default partition expiration for all partitioned tables in the dataset, in milliseconds.

defaultTableExpirationMs Number

The default lifetime of all tables in the dataset, in milliseconds. The minimum value is 3600000 milliseconds (one hour).

deleteContentsOnDestroy Boolean

If set to true, delete all the tables in the dataset when destroying the resource; otherwise, destroying the resource will fail if tables are present.

description String

A user-friendly description of the dataset

etag String

A hash of the resource.

friendlyName String

A descriptive name for the dataset

labels Map<String>

The labels associated with this dataset. You can use these to organize and group your datasets

lastModifiedTime Number

The date when this dataset or any of its tables was last modified, in milliseconds since the epoch.

location String

The geographic location where the dataset should reside. See official docs.

maxTimeTravelHours String

Defines the time travel window in hours. The value can be from 48 to 168 hours (2 to 7 days).

project String

The ID of the project in which the resource belongs. If it is not provided, the provider project is used.

selfLink String

The URI of the created resource.

Supporting Types

DatasetAccess

Dataset DatasetAccessDataset

Grants all resources of particular types in a particular dataset read access to the current dataset. Structure is documented below.

Domain string

A domain to grant access to. Any users signed in with the domain specified will be granted the specified access

GroupByEmail string

An email address of a Google Group to grant access to.

Role string

Describes the rights granted to the user specified by the other member of the access object. Basic, predefined, and custom roles are supported. Predefined roles that have equivalent basic roles are swapped by the API to their basic counterparts. See official docs.

Routine DatasetAccessRoutine

A routine from a different dataset to grant access to. Queries executed against that routine will have read access to tables in this dataset. The role field is not required when this field is set. If that routine is updated by any user, access to the routine needs to be granted again via an update operation. Structure is documented below.

SpecialGroup string

A special group to grant access to. Possible values include: projectOwners, projectReaders, projectWriters, and allAuthenticatedUsers.

UserByEmail string

An email address of a user to grant access to. For example: fred@example.com

View DatasetAccessView

A view from a different dataset to grant access to. Queries executed against that view will have read access to tables in this dataset. The role field is not required when this field is set. If that view is updated by any user, access to the view needs to be granted again via an update operation. Structure is documented below.

Dataset DatasetAccessDataset

Grants all resources of particular types in a particular dataset read access to the current dataset. Structure is documented below.

Domain string

A domain to grant access to. Any users signed in with the domain specified will be granted the specified access

GroupByEmail string

An email address of a Google Group to grant access to.

Role string

Describes the rights granted to the user specified by the other member of the access object. Basic, predefined, and custom roles are supported. Predefined roles that have equivalent basic roles are swapped by the API to their basic counterparts. See official docs.

Routine DatasetAccessRoutine

A routine from a different dataset to grant access to. Queries executed against that routine will have read access to tables in this dataset. The role field is not required when this field is set. If that routine is updated by any user, access to the routine needs to be granted again via an update operation. Structure is documented below.

SpecialGroup string

A special group to grant access to. Possible values include: projectOwners, projectReaders, projectWriters, and allAuthenticatedUsers.

UserByEmail string

An email address of a user to grant access to. For example: fred@example.com

View DatasetAccessView

A view from a different dataset to grant access to. Queries executed against that view will have read access to tables in this dataset. The role field is not required when this field is set. If that view is updated by any user, access to the view needs to be granted again via an update operation. Structure is documented below.

dataset DatasetAccessDataset

Grants all resources of particular types in a particular dataset read access to the current dataset. Structure is documented below.

domain String

A domain to grant access to. Any users signed in with the domain specified will be granted the specified access.

groupByEmail String

An email address of a Google Group to grant access to.

role String

Describes the rights granted to the user specified by the other member of the access object. Basic, predefined, and custom roles are supported. Predefined roles that have equivalent basic roles are swapped by the API to their basic counterparts. See official docs.

routine DatasetAccessRoutine

A routine from a different dataset to grant access to. Queries executed against that routine will have read access to tables in this dataset. The role field is not required when this field is set. If that routine is updated by any user, access to the routine needs to be granted again via an update operation. Structure is documented below.

specialGroup String

A special group to grant access to. Possible values include: projectOwners, projectReaders, projectWriters, and allAuthenticatedUsers.

userByEmail String

An email address of a user to grant access to. For example: fred@example.com

view DatasetAccessView

A view from a different dataset to grant access to. Queries executed against that view will have read access to tables in this dataset. The role field is not required when this field is set. If that view is updated by any user, access to the view needs to be granted again via an update operation. Structure is documented below.

dataset DatasetAccessDataset

Grants all resources of particular types in a particular dataset read access to the current dataset. Structure is documented below.

domain string

A domain to grant access to. Any users signed in with the domain specified will be granted the specified access.

groupByEmail string

An email address of a Google Group to grant access to.

role string

Describes the rights granted to the user specified by the other member of the access object. Basic, predefined, and custom roles are supported. Predefined roles that have equivalent basic roles are swapped by the API to their basic counterparts. See official docs.

routine DatasetAccessRoutine

A routine from a different dataset to grant access to. Queries executed against that routine will have read access to tables in this dataset. The role field is not required when this field is set. If that routine is updated by any user, access to the routine needs to be granted again via an update operation. Structure is documented below.

specialGroup string

A special group to grant access to. Possible values include: projectOwners, projectReaders, projectWriters, and allAuthenticatedUsers.

userByEmail string

An email address of a user to grant access to. For example: fred@example.com

view DatasetAccessView

A view from a different dataset to grant access to. Queries executed against that view will have read access to tables in this dataset. The role field is not required when this field is set. If that view is updated by any user, access to the view needs to be granted again via an update operation. Structure is documented below.

dataset DatasetAccessDataset

Grants all resources of particular types in a particular dataset read access to the current dataset. Structure is documented below.

domain str

A domain to grant access to. Any users signed in with the domain specified will be granted the specified access.

group_by_email str

An email address of a Google Group to grant access to.

role str

Describes the rights granted to the user specified by the other member of the access object. Basic, predefined, and custom roles are supported. Predefined roles that have equivalent basic roles are swapped by the API to their basic counterparts. See official docs.

routine DatasetAccessRoutine

A routine from a different dataset to grant access to. Queries executed against that routine will have read access to tables in this dataset. The role field is not required when this field is set. If that routine is updated by any user, access to the routine needs to be granted again via an update operation. Structure is documented below.

special_group str

A special group to grant access to. Possible values include: projectOwners, projectReaders, projectWriters, and allAuthenticatedUsers.

user_by_email str

An email address of a user to grant access to. For example: fred@example.com

view DatasetAccessView

A view from a different dataset to grant access to. Queries executed against that view will have read access to tables in this dataset. The role field is not required when this field is set. If that view is updated by any user, access to the view needs to be granted again via an update operation. Structure is documented below.

dataset Property Map

Grants all resources of particular types in a particular dataset read access to the current dataset. Structure is documented below.

domain String

A domain to grant access to. Any users signed in with the domain specified will be granted the specified access.

groupByEmail String

An email address of a Google Group to grant access to.

role String

Describes the rights granted to the user specified by the other member of the access object. Basic, predefined, and custom roles are supported. Predefined roles that have equivalent basic roles are swapped by the API to their basic counterparts. See official docs.

routine Property Map

A routine from a different dataset to grant access to. Queries executed against that routine will have read access to tables in this dataset. The role field is not required when this field is set. If that routine is updated by any user, access to the routine needs to be granted again via an update operation. Structure is documented below.

specialGroup String

A special group to grant access to. Possible values include: projectOwners, projectReaders, projectWriters, and allAuthenticatedUsers.

userByEmail String

An email address of a user to grant access to. For example: fred@example.com

view Property Map

A view from a different dataset to grant access to. Queries executed against that view will have read access to tables in this dataset. The role field is not required when this field is set. If that view is updated by any user, access to the view needs to be granted again via an update operation. Structure is documented below.

DatasetAccessDataset

Dataset DatasetAccessDatasetDataset

The dataset this entry applies to. Structure is documented below.

TargetTypes List<string>

Which resources in the dataset this entry applies to. Currently, only views are supported, but additional target types may be added in the future. Possible values: VIEWS

Dataset DatasetAccessDatasetDataset

The dataset this entry applies to. Structure is documented below.

TargetTypes []string

Which resources in the dataset this entry applies to. Currently, only views are supported, but additional target types may be added in the future. Possible values: VIEWS

dataset DatasetAccessDatasetDataset

The dataset this entry applies to. Structure is documented below.

targetTypes List<String>

Which resources in the dataset this entry applies to. Currently, only views are supported, but additional target types may be added in the future. Possible values: VIEWS

dataset DatasetAccessDatasetDataset

The dataset this entry applies to. Structure is documented below.

targetTypes string[]

Which resources in the dataset this entry applies to. Currently, only views are supported, but additional target types may be added in the future. Possible values: VIEWS

dataset DatasetAccessDatasetDataset

The dataset this entry applies to. Structure is documented below.

target_types Sequence[str]

Which resources in the dataset this entry applies to. Currently, only views are supported, but additional target types may be added in the future. Possible values: VIEWS

dataset Property Map

The dataset this entry applies to. Structure is documented below.

targetTypes List<String>

Which resources in the dataset this entry applies to. Currently, only views are supported, but additional target types may be added in the future. Possible values: VIEWS

DatasetAccessDatasetDataset

DatasetId string

The ID of the dataset containing this table.

ProjectId string

The ID of the project containing this table.

DatasetId string

The ID of the dataset containing this table.

ProjectId string

The ID of the project containing this table.

datasetId String

The ID of the dataset containing this table.

projectId String

The ID of the project containing this table.

datasetId string

The ID of the dataset containing this table.

projectId string

The ID of the project containing this table.

dataset_id str

The ID of the dataset containing this table.

project_id str

The ID of the project containing this table.

datasetId String

The ID of the dataset containing this table.

projectId String

The ID of the project containing this table.

DatasetAccessRoutine

DatasetId string

The ID of the dataset containing this table.

ProjectId string

The ID of the project containing this table.

RoutineId string

The ID of the routine. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 256 characters.

DatasetId string

The ID of the dataset containing this table.

ProjectId string

The ID of the project containing this table.

RoutineId string

The ID of the routine. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 256 characters.

datasetId String

The ID of the dataset containing this table.

projectId String

The ID of the project containing this table.

routineId String

The ID of the routine. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 256 characters.

datasetId string

The ID of the dataset containing this table.

projectId string

The ID of the project containing this table.

routineId string

The ID of the routine. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 256 characters.

dataset_id str

The ID of the dataset containing this table.

project_id str

The ID of the project containing this table.

routine_id str

The ID of the routine. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 256 characters.

datasetId String

The ID of the dataset containing this table.

projectId String

The ID of the project containing this table.

routineId String

The ID of the routine. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 256 characters.

DatasetAccessView

DatasetId string

The ID of the dataset containing this table.

ProjectId string

The ID of the project containing this table.

TableId string

The ID of the table. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters.

DatasetId string

The ID of the dataset containing this table.

ProjectId string

The ID of the project containing this table.

TableId string

The ID of the table. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters.

datasetId String

The ID of the dataset containing this table.

projectId String

The ID of the project containing this table.

tableId String

The ID of the table. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters.

datasetId string

The ID of the dataset containing this table.

projectId string

The ID of the project containing this table.

tableId string

The ID of the table. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters.

dataset_id str

The ID of the dataset containing this table.

project_id str

The ID of the project containing this table.

table_id str

The ID of the table. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters.

datasetId String

The ID of the dataset containing this table.

projectId String

The ID of the project containing this table.

tableId String

The ID of the table. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters.

DatasetDefaultEncryptionConfiguration

KmsKeyName string

Describes the Cloud KMS encryption key that will be used to protect destination BigQuery table. The BigQuery Service Account associated with your project requires access to this encryption key.

KmsKeyName string

Describes the Cloud KMS encryption key that will be used to protect destination BigQuery table. The BigQuery Service Account associated with your project requires access to this encryption key.

kmsKeyName String

Describes the Cloud KMS encryption key that will be used to protect destination BigQuery table. The BigQuery Service Account associated with your project requires access to this encryption key.

kmsKeyName string

Describes the Cloud KMS encryption key that will be used to protect destination BigQuery table. The BigQuery Service Account associated with your project requires access to this encryption key.

kms_key_name str

Describes the Cloud KMS encryption key that will be used to protect destination BigQuery table. The BigQuery Service Account associated with your project requires access to this encryption key.

kmsKeyName String

Describes the Cloud KMS encryption key that will be used to protect destination BigQuery table. The BigQuery Service Account associated with your project requires access to this encryption key.

Package Details

Repository
Google Cloud (GCP) Classic pulumi/pulumi-gcp
License
Apache-2.0
Notes

This Pulumi package is based on the google-beta Terraform Provider.