Manage GCP BigQuery Dataset IAM Access

The gcp:bigquery/datasetIamMember:DatasetIamMember resource, part of the Pulumi GCP provider, grants IAM permissions on BigQuery datasets by adding individual members to roles without affecting other permissions. This guide focuses on three capabilities: single-member role grants, time-limited access with IAM conditions, and multi-member role bindings.

This resource references existing BigQuery datasets and grants access to users, groups, or service accounts. The examples are intentionally small. Combine them with your own dataset provisioning and identity management.

Grant a single user access to a dataset

Most IAM configurations start by granting individual users specific roles on datasets. DatasetIamMember adds one member to one role without affecting other members or roles.

import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";

// The dataset that receives the IAM grant.
const dataset = new gcp.bigquery.Dataset("dataset", {
    datasetId: "example_dataset",
});

// Non-authoritative grant: adds a single member to a single role
// without disturbing other members or roles on the dataset.
const editor = new gcp.bigquery.DatasetIamMember("editor", {
    datasetId: dataset.datasetId,
    role: "roles/bigquery.dataEditor",
    member: "user:jane@example.com",
});
import pulumi
import pulumi_gcp as gcp

# The dataset that receives the IAM grant.
dataset = gcp.bigquery.Dataset("dataset", dataset_id="example_dataset")

# Non-authoritative grant: adds a single member to a single role
# without disturbing other members or roles on the dataset.
editor = gcp.bigquery.DatasetIamMember(
    "editor",
    dataset_id=dataset.dataset_id,
    role="roles/bigquery.dataEditor",
    member="user:jane@example.com",
)
package main

import (
	"github.com/pulumi/pulumi-gcp/sdk/v9/go/gcp/bigquery"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		dataset, err := bigquery.NewDataset(ctx, "dataset", &bigquery.DatasetArgs{
			DatasetId: pulumi.String("example_dataset"),
		})
		if err != nil {
			return err
		}
		_, err = bigquery.NewDatasetIamMember(ctx, "editor", &bigquery.DatasetIamMemberArgs{
			DatasetId: dataset.DatasetId,
			Role:      pulumi.String("roles/bigquery.dataEditor"),
			Member:    pulumi.String("user:jane@example.com"),
		})
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;

return await Deployment.RunAsync(() => 
{
    // The dataset that receives the IAM grant.
    var dataset = new Gcp.BigQuery.Dataset("dataset", new()
    {
        DatasetId = "example_dataset",
    });

    // Non-authoritative grant: adds a single member to a single role
    // without disturbing other members or roles on the dataset.
    var editor = new Gcp.BigQuery.DatasetIamMember("editor", new()
    {
        DatasetId = dataset.DatasetId,
        Role = "roles/bigquery.dataEditor",
        Member = "user:jane@example.com",
    });

});
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.bigquery.Dataset;
import com.pulumi.gcp.bigquery.DatasetArgs;
import com.pulumi.gcp.bigquery.DatasetIamMember;
import com.pulumi.gcp.bigquery.DatasetIamMemberArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        // The dataset that receives the IAM grant.
        var dataset = new Dataset("dataset", DatasetArgs.builder()
            .datasetId("example_dataset")
            .build());

        // Non-authoritative grant: adds a single member to a single role
        // without disturbing other members or roles on the dataset.
        var editor = new DatasetIamMember("editor", DatasetIamMemberArgs.builder()
            .datasetId(dataset.datasetId())
            .role("roles/bigquery.dataEditor")
            .member("user:jane@example.com")
            .build());

    }
}
resources:
  # Non-authoritative grant: adds a single member to a single role
  # without disturbing other members or roles on the dataset.
  editor:
    type: gcp:bigquery:DatasetIamMember
    properties:
      datasetId: ${dataset.datasetId}
      role: roles/bigquery.dataEditor
      member: user:jane@example.com
  # The dataset that receives the IAM grant.
  dataset:
    type: gcp:bigquery:Dataset
    properties:
      datasetId: example_dataset

The member property identifies who receives access using formats like user:jane@example.com, group:team@example.com, or serviceAccount:app@project.iam.gserviceaccount.com. The role property specifies the permission level; use full role names like roles/bigquery.dataEditor, not legacy names like WRITER. This resource is non-authoritative, meaning it preserves existing members on the same role.

Add time-limited access with IAM conditions

Temporary access grants expire automatically without manual cleanup, which is useful for contractors, audits, or other time-bound projects.

import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";

// The dataset that the conditional IAM grant targets.
const dataset = new gcp.bigquery.Dataset("dataset", {
    datasetId: "example_dataset",
});

// The grant stops applying once the CEL expression evaluates to false,
// so access expires without manual cleanup.
const editor = new gcp.bigquery.DatasetIamMember("editor", {
    datasetId: dataset.datasetId,
    role: "roles/bigquery.dataEditor",
    member: "user:jane@example.com",
    condition: {
        title: "expires_after_2029_12_31",
        description: "Expiring at midnight of 2029-12-31",
        expression: "request.time < timestamp(\"2030-01-01T00:00:00Z\")",
    },
});
import pulumi
import pulumi_gcp as gcp

# The dataset that the conditional IAM grant targets.
dataset = gcp.bigquery.Dataset("dataset", dataset_id="example_dataset")

# The grant stops applying once the CEL expression evaluates to false,
# so access expires without manual cleanup.
editor = gcp.bigquery.DatasetIamMember(
    "editor",
    dataset_id=dataset.dataset_id,
    role="roles/bigquery.dataEditor",
    member="user:jane@example.com",
    condition={
        "title": "expires_after_2029_12_31",
        "description": "Expiring at midnight of 2029-12-31",
        "expression": 'request.time < timestamp("2030-01-01T00:00:00Z")',
    },
)
package main

import (
	"github.com/pulumi/pulumi-gcp/sdk/v9/go/gcp/bigquery"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		dataset, err := bigquery.NewDataset(ctx, "dataset", &bigquery.DatasetArgs{
			DatasetId: pulumi.String("example_dataset"),
		})
		if err != nil {
			return err
		}
		_, err = bigquery.NewDatasetIamMember(ctx, "editor", &bigquery.DatasetIamMemberArgs{
			DatasetId: dataset.DatasetId,
			Role:      pulumi.String("roles/bigquery.dataEditor"),
			Member:    pulumi.String("user:jane@example.com"),
			Condition: &bigquery.DatasetIamMemberConditionArgs{
				Title:       pulumi.String("expires_after_2029_12_31"),
				Description: pulumi.String("Expiring at midnight of 2029-12-31"),
				Expression:  pulumi.String("request.time < timestamp(\"2030-01-01T00:00:00Z\")"),
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;

return await Deployment.RunAsync(() => 
{
    // The dataset that the conditional IAM grant targets.
    var dataset = new Gcp.BigQuery.Dataset("dataset", new()
    {
        DatasetId = "example_dataset",
    });

    // The grant stops applying once the CEL expression evaluates to false,
    // so access expires without manual cleanup.
    var editor = new Gcp.BigQuery.DatasetIamMember("editor", new()
    {
        DatasetId = dataset.DatasetId,
        Role = "roles/bigquery.dataEditor",
        Member = "user:jane@example.com",
        Condition = new Gcp.BigQuery.Inputs.DatasetIamMemberConditionArgs
        {
            Title = "expires_after_2029_12_31",
            Description = "Expiring at midnight of 2029-12-31",
            Expression = "request.time < timestamp(\"2030-01-01T00:00:00Z\")",
        },
    });

});
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.bigquery.Dataset;
import com.pulumi.gcp.bigquery.DatasetArgs;
import com.pulumi.gcp.bigquery.DatasetIamMember;
import com.pulumi.gcp.bigquery.DatasetIamMemberArgs;
import com.pulumi.gcp.bigquery.inputs.DatasetIamMemberConditionArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        // The dataset that the conditional IAM grant targets.
        var dataset = new Dataset("dataset", DatasetArgs.builder()
            .datasetId("example_dataset")
            .build());

        // The grant stops applying once the CEL expression evaluates to
        // false, so access expires without manual cleanup.
        var editor = new DatasetIamMember("editor", DatasetIamMemberArgs.builder()
            .datasetId(dataset.datasetId())
            .role("roles/bigquery.dataEditor")
            .member("user:jane@example.com")
            .condition(DatasetIamMemberConditionArgs.builder()
                .title("expires_after_2029_12_31")
                .description("Expiring at midnight of 2029-12-31")
                .expression("request.time < timestamp(\"2030-01-01T00:00:00Z\")")
                .build())
            .build());

    }
}
resources:
  # The grant stops applying once the CEL expression evaluates to false,
  # so access expires without manual cleanup.
  editor:
    type: gcp:bigquery:DatasetIamMember
    properties:
      datasetId: ${dataset.datasetId}
      role: roles/bigquery.dataEditor
      member: user:jane@example.com
      condition:
        title: expires_after_2029_12_31
        description: Expiring at midnight of 2029-12-31
        expression: request.time < timestamp("2030-01-01T00:00:00Z")
  # The dataset that the conditional IAM grant targets.
  dataset:
    type: gcp:bigquery:Dataset
    properties:
      datasetId: example_dataset

The condition block adds time-based restrictions using CEL expressions. The expression property evaluates request time against a timestamp; when the condition becomes false, access is automatically revoked. The title and description properties document the condition’s purpose for auditing.

Grant multiple users the same role

When teams need shared access, DatasetIamBinding grants a role to multiple members at once. This resource is authoritative for the role, replacing any existing members.

import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";

// The dataset shared by the team.
const dataset = new gcp.bigquery.Dataset("dataset", {
    datasetId: "example_dataset",
});

// Authoritative for the role: the members list replaces any
// existing members of roles/bigquery.dataViewer on this dataset.
const reader = new gcp.bigquery.DatasetIamBinding("reader", {
    datasetId: dataset.datasetId,
    role: "roles/bigquery.dataViewer",
    members: ["user:jane@example.com"],
});
import pulumi
import pulumi_gcp as gcp

# The dataset shared by the team.
dataset = gcp.bigquery.Dataset("dataset", dataset_id="example_dataset")

# Authoritative for the role: the members list replaces any
# existing members of roles/bigquery.dataViewer on this dataset.
reader = gcp.bigquery.DatasetIamBinding(
    "reader",
    dataset_id=dataset.dataset_id,
    role="roles/bigquery.dataViewer",
    members=["user:jane@example.com"],
)
package main

import (
	"github.com/pulumi/pulumi-gcp/sdk/v9/go/gcp/bigquery"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		dataset, err := bigquery.NewDataset(ctx, "dataset", &bigquery.DatasetArgs{
			DatasetId: pulumi.String("example_dataset"),
		})
		if err != nil {
			return err
		}
		_, err = bigquery.NewDatasetIamBinding(ctx, "reader", &bigquery.DatasetIamBindingArgs{
			DatasetId: dataset.DatasetId,
			Role:      pulumi.String("roles/bigquery.dataViewer"),
			Members: pulumi.StringArray{
				pulumi.String("user:jane@example.com"),
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;

return await Deployment.RunAsync(() => 
{
    // The dataset shared by the team.
    var dataset = new Gcp.BigQuery.Dataset("dataset", new()
    {
        DatasetId = "example_dataset",
    });

    // Authoritative for the role: the members list replaces any
    // existing members of roles/bigquery.dataViewer on this dataset.
    var reader = new Gcp.BigQuery.DatasetIamBinding("reader", new()
    {
        DatasetId = dataset.DatasetId,
        Role = "roles/bigquery.dataViewer",
        Members = new[]
        {
            "user:jane@example.com",
        },
    });

});
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.bigquery.Dataset;
import com.pulumi.gcp.bigquery.DatasetArgs;
import com.pulumi.gcp.bigquery.DatasetIamBinding;
import com.pulumi.gcp.bigquery.DatasetIamBindingArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        // The dataset shared by the team.
        var dataset = new Dataset("dataset", DatasetArgs.builder()
            .datasetId("example_dataset")
            .build());

        // Authoritative for the role: the members list replaces any
        // existing members of roles/bigquery.dataViewer on this dataset.
        var reader = new DatasetIamBinding("reader", DatasetIamBindingArgs.builder()
            .datasetId(dataset.datasetId())
            .role("roles/bigquery.dataViewer")
            .members("user:jane@example.com")
            .build());

    }
}
resources:
  # Authoritative for the role: the members list replaces any
  # existing members of roles/bigquery.dataViewer on this dataset.
  reader:
    type: gcp:bigquery:DatasetIamBinding
    properties:
      datasetId: ${dataset.datasetId}
      role: roles/bigquery.dataViewer
      members:
        - user:jane@example.com
  # The dataset shared by the team.
  dataset:
    type: gcp:bigquery:Dataset
    properties:
      datasetId: example_dataset

The members property accepts a list of identities that all receive the same role. Unlike DatasetIamMember, DatasetIamBinding is authoritative for its role: it replaces all existing members with the specified list. Use this when you want to manage all members of a role together rather than adding them individually.

Beyond these examples

These snippets focus on specific IAM grant features: single-member and multi-member grants, time-based access conditions, and role-level authorization. They’re intentionally minimal rather than full access control systems.

The examples reference pre-existing infrastructure such as BigQuery datasets (referenced by datasetId) and a GCP project with BigQuery API enabled. They focus on granting permissions rather than provisioning datasets or identities.

To keep things focused, common IAM patterns are omitted, including:

  • Full policy replacement (DatasetIamPolicy)
  • Authorized views (DatasetAccess resource)
  • Custom role definitions
  • Service account creation and management

These omissions are intentional: the goal is to illustrate how each IAM grant type is wired, not provide drop-in access control modules. See the BigQuery DatasetIamMember resource reference for all available configuration options.

Let's manage GCP BigQuery Dataset IAM Access

Get started with Pulumi Cloud, then follow our quick setup guide to deploy this infrastructure.

Try Pulumi Cloud for FREE

Frequently Asked Questions

Resource Conflicts & Compatibility
Can I use DatasetIamMember with DatasetAccess or the Dataset access field?
No, gcp.bigquery.DatasetIamMember (and other DatasetIam resources) cannot be used with gcp.bigquery.DatasetAccess or the access field on gcp.bigquery.Dataset. They will conflict over policy management. Choose one approach for your dataset.
What happens to authorized view permissions when I use DatasetIam resources?
Using any DatasetIam resource removes existing authorized view permissions from the dataset. If you need to preserve authorized views, use gcp.bigquery.DatasetAccess instead.
Can I use DatasetIamPolicy with DatasetIamBinding or DatasetIamMember?
No, gcp.bigquery.DatasetIamPolicy cannot be used with gcp.bigquery.DatasetIamBinding or gcp.bigquery.DatasetIamMember. DatasetIamPolicy is authoritative for the entire policy, while Binding/Member are granular. They will conflict.
Can I use DatasetIamBinding and DatasetIamMember together?
Yes, but only if they don’t grant privileges to the same role. Each role must be managed by either Binding or Member, not both.
Role Configuration
Why can't I use OWNER, WRITER, or READER roles?
Legacy BigQuery roles (OWNER, WRITER, READER) are not supported with IAM resources. Use the full role forms instead: roles/bigquery.dataOwner, roles/bigquery.dataEditor, and roles/bigquery.dataViewer.
How do I format custom roles?
Custom roles must use the format [projects|organizations]/{parent-name}/roles/{role-name}. For example: projects/my-project/roles/customBigQueryRole.
What's the difference between DatasetIamPolicy, DatasetIamBinding, and DatasetIamMember?
DatasetIamPolicy is authoritative and replaces the entire IAM policy. DatasetIamBinding is authoritative for a specific role but preserves other roles. DatasetIamMember is non-authoritative and adds a single member to a role while preserving other members.
Member Identity Formats
What member identity formats can I use?

You can use multiple formats:

  • user:{email} for specific Google accounts
  • serviceAccount:{email} for service accounts
  • group:{email} for Google groups
  • domain:{domain} for Google Workspace (formerly G Suite) domains
  • allUsers for anyone on the internet
  • allAuthenticatedUsers for anyone with a Google account
  • projectOwners, projectReaders, projectWriters for project-level identities
  • iamMember:{principal} for federated identities
IAM Conditions
How do I set time-based access that expires?
Use the condition property with title, description, and expression. For expiration, use an expression like request.time < timestamp("2030-01-01T00:00:00Z").

Using a different cloud?

Explore security guides for other cloud providers: