The gcp:bigquery/dataTransferConfig:DataTransferConfig resource, part of the Pulumi GCP provider, defines a BigQuery Data Transfer configuration that automates data movement into BigQuery on a schedule. This guide focuses on three capabilities: scheduled SQL query execution, customer-managed encryption (CMEK), and third-party connector configuration.
Transfer configs depend on BigQuery datasets, IAM permissions for the Data Transfer service account, and potentially Cloud KMS keys or OAuth credentials for external sources. The examples are intentionally small. Combine them with your own datasets, encryption policies, and authentication setup.
Schedule recurring SQL queries to populate tables
Analytics teams often need to run the same SQL query on a schedule to refresh materialized views or aggregate data into summary tables.
// Example: schedule a recurring BigQuery SQL query (TypeScript).
import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";
// Look up the current project so we can reference its id and number.
const project = gcp.organizations.getProject({});
// Grant the BigQuery Data Transfer service agent token-creator rights;
// the scheduled query cannot run without this IAM binding.
const permissions = new gcp.projects.IAMMember("permissions", {
project: project.then(project => project.projectId),
role: "roles/iam.serviceAccountTokenCreator",
member: project.then(project => `serviceAccount:service-${project.number}@gcp-sa-bigquerydatatransfer.iam.gserviceaccount.com`),
});
// Destination dataset for the scheduled query's output table.
const myDataset = new gcp.bigquery.Dataset("my_dataset", {
datasetId: "my_dataset",
friendlyName: "foo",
description: "bar",
location: "asia-northeast1",
}, {
dependsOn: [permissions],
});
// Scheduled query: runs the SQL below quarterly and appends the
// results to my_table in the dataset above.
const queryConfig = new gcp.bigquery.DataTransferConfig("query_config", {
displayName: "my-query",
location: "asia-northeast1", // must match the dataset's location
dataSourceId: "scheduled_query", // built-in scheduled-query data source
schedule: "first sunday of quarter 00:00",
destinationDatasetId: myDataset.datasetId,
params: {
destination_table_name_template: "my_table",
write_disposition: "WRITE_APPEND", // append rows; never truncate
query: "SELECT name FROM tabl WHERE x = 'y'", // placeholder SQL — substitute your own query
},
}, {
dependsOn: [permissions], // IAM grant must exist before the transfer config
});
# Example: schedule a recurring BigQuery SQL query (Python).
import pulumi
import pulumi_gcp as gcp
# Look up the current project so we can reference its id and number.
project = gcp.organizations.get_project()
# Grant the BigQuery Data Transfer service agent token-creator rights;
# the scheduled query cannot run without this IAM binding.
permissions = gcp.projects.IAMMember("permissions",
project=project.project_id,
role="roles/iam.serviceAccountTokenCreator",
member=f"serviceAccount:service-{project.number}@gcp-sa-bigquerydatatransfer.iam.gserviceaccount.com")
# Destination dataset for the scheduled query's output table.
my_dataset = gcp.bigquery.Dataset("my_dataset",
dataset_id="my_dataset",
friendly_name="foo",
description="bar",
location="asia-northeast1",
opts = pulumi.ResourceOptions(depends_on=[permissions]))
# Scheduled query: runs quarterly and appends rows to my_table.
query_config = gcp.bigquery.DataTransferConfig("query_config",
display_name="my-query",
location="asia-northeast1",  # must match the dataset's location
data_source_id="scheduled_query",  # built-in scheduled-query data source
schedule="first sunday of quarter 00:00",
destination_dataset_id=my_dataset.dataset_id,
params={
"destination_table_name_template": "my_table",
"write_disposition": "WRITE_APPEND",  # append rows; never truncate
"query": "SELECT name FROM tabl WHERE x = 'y'",  # placeholder SQL
},
opts = pulumi.ResourceOptions(depends_on=[permissions]))
// Example: schedule a recurring BigQuery SQL query (Go).
package main
import (
"github.com/pulumi/pulumi-gcp/sdk/v9/go/gcp/bigquery"
"github.com/pulumi/pulumi-gcp/sdk/v9/go/gcp/organizations"
"github.com/pulumi/pulumi-gcp/sdk/v9/go/gcp/projects"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
// Resolve the current project for its id and number.
project, err := organizations.LookupProject(ctx, &organizations.LookupProjectArgs{}, nil)
if err != nil {
return err
}
// Grant the Data Transfer service agent token-creator rights;
// scheduled queries fail without this IAM binding.
permissions, err := projects.NewIAMMember(ctx, "permissions", &projects.IAMMemberArgs{
Project: pulumi.String(project.ProjectId),
Role: pulumi.String("roles/iam.serviceAccountTokenCreator"),
Member: pulumi.Sprintf("serviceAccount:service-%v@gcp-sa-bigquerydatatransfer.iam.gserviceaccount.com", project.Number),
})
if err != nil {
return err
}
// Destination dataset for the scheduled query's output table.
myDataset, err := bigquery.NewDataset(ctx, "my_dataset", &bigquery.DatasetArgs{
DatasetId: pulumi.String("my_dataset"),
FriendlyName: pulumi.String("foo"),
Description: pulumi.String("bar"),
Location: pulumi.String("asia-northeast1"),
}, pulumi.DependsOn([]pulumi.Resource{
permissions,
}))
if err != nil {
return err
}
// Scheduled query: runs quarterly and appends rows to my_table.
_, err = bigquery.NewDataTransferConfig(ctx, "query_config", &bigquery.DataTransferConfigArgs{
DisplayName: pulumi.String("my-query"),
Location: pulumi.String("asia-northeast1"),
DataSourceId: pulumi.String("scheduled_query"),
Schedule: pulumi.String("first sunday of quarter 00:00"),
DestinationDatasetId: myDataset.DatasetId,
Params: pulumi.StringMap{
"destination_table_name_template": pulumi.String("my_table"),
"write_disposition": pulumi.String("WRITE_APPEND"),
"query": pulumi.String("SELECT name FROM tabl WHERE x = 'y'"),
},
}, pulumi.DependsOn([]pulumi.Resource{
permissions,
}))
if err != nil {
return err
}
return nil
})
}
// Example: schedule a recurring BigQuery SQL query (C#).
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;
return await Deployment.RunAsync(() =>
{
// Look up the current project for its id and number.
var project = Gcp.Organizations.GetProject.Invoke();
// Grant the Data Transfer service agent token-creator rights;
// the scheduled query cannot run without this IAM binding.
var permissions = new Gcp.Projects.IAMMember("permissions", new()
{
Project = project.Apply(getProjectResult => getProjectResult.ProjectId),
Role = "roles/iam.serviceAccountTokenCreator",
Member = $"serviceAccount:service-{project.Apply(getProjectResult => getProjectResult.Number)}@gcp-sa-bigquerydatatransfer.iam.gserviceaccount.com",
});
// Destination dataset for the scheduled query's output table.
var myDataset = new Gcp.BigQuery.Dataset("my_dataset", new()
{
DatasetId = "my_dataset",
FriendlyName = "foo",
Description = "bar",
Location = "asia-northeast1",
}, new CustomResourceOptions
{
DependsOn =
{
permissions,
},
});
// Scheduled query: runs quarterly and appends rows to my_table.
var queryConfig = new Gcp.BigQuery.DataTransferConfig("query_config", new()
{
DisplayName = "my-query",
Location = "asia-northeast1", // must match the dataset's location
DataSourceId = "scheduled_query", // built-in scheduled-query data source
Schedule = "first sunday of quarter 00:00",
DestinationDatasetId = myDataset.DatasetId,
Params =
{
{ "destination_table_name_template", "my_table" },
{ "write_disposition", "WRITE_APPEND" }, // append rows; never truncate
{ "query", "SELECT name FROM tabl WHERE x = 'y'" }, // placeholder SQL
},
}, new CustomResourceOptions
{
DependsOn =
{
permissions,
},
});
});
// Example: schedule a recurring BigQuery SQL query (Java).
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.organizations.OrganizationsFunctions;
import com.pulumi.gcp.organizations.inputs.GetProjectArgs;
import com.pulumi.gcp.projects.IAMMember;
import com.pulumi.gcp.projects.IAMMemberArgs;
import com.pulumi.gcp.bigquery.Dataset;
import com.pulumi.gcp.bigquery.DatasetArgs;
import com.pulumi.gcp.bigquery.DataTransferConfig;
import com.pulumi.gcp.bigquery.DataTransferConfigArgs;
import com.pulumi.resources.CustomResourceOptions;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
// Look up the current project for its id and number.
final var project = OrganizationsFunctions.getProject(GetProjectArgs.builder()
.build());
// Grant the Data Transfer service agent token-creator rights;
// the scheduled query cannot run without this IAM binding.
var permissions = new IAMMember("permissions", IAMMemberArgs.builder()
.project(project.projectId())
.role("roles/iam.serviceAccountTokenCreator")
.member(String.format("serviceAccount:service-%s@gcp-sa-bigquerydatatransfer.iam.gserviceaccount.com", project.number()))
.build());
// Destination dataset for the scheduled query's output table.
var myDataset = new Dataset("myDataset", DatasetArgs.builder()
.datasetId("my_dataset")
.friendlyName("foo")
.description("bar")
.location("asia-northeast1")
.build(), CustomResourceOptions.builder()
.dependsOn(permissions)
.build());
// Scheduled query: runs quarterly and appends rows to my_table.
var queryConfig = new DataTransferConfig("queryConfig", DataTransferConfigArgs.builder()
.displayName("my-query")
.location("asia-northeast1") // must match the dataset's location
.dataSourceId("scheduled_query") // built-in scheduled-query data source
.schedule("first sunday of quarter 00:00")
.destinationDatasetId(myDataset.datasetId())
.params(Map.ofEntries(
Map.entry("destination_table_name_template", "my_table"),
Map.entry("write_disposition", "WRITE_APPEND"),
Map.entry("query", "SELECT name FROM tabl WHERE x = 'y'")
))
.build(), CustomResourceOptions.builder()
.dependsOn(permissions)
.build());
}
}
# Example: schedule a recurring BigQuery SQL query (YAML).
resources:
# Grants the Data Transfer service agent token-creator rights (required
# for scheduled queries to run).
permissions:
type: gcp:projects:IAMMember
properties:
project: ${project.projectId}
role: roles/iam.serviceAccountTokenCreator
member: serviceAccount:service-${project.number}@gcp-sa-bigquerydatatransfer.iam.gserviceaccount.com
# Scheduled query: runs quarterly and appends rows to my_table.
queryConfig:
type: gcp:bigquery:DataTransferConfig
name: query_config
properties:
displayName: my-query
location: asia-northeast1 # must match the dataset's location
dataSourceId: scheduled_query # built-in scheduled-query data source
schedule: first sunday of quarter 00:00
destinationDatasetId: ${myDataset.datasetId}
params:
destination_table_name_template: my_table
write_disposition: WRITE_APPEND # append rows; never truncate
query: SELECT name FROM tabl WHERE x = 'y'
options:
dependsOn:
- ${permissions}
# Destination dataset for the scheduled query's output table.
myDataset:
type: gcp:bigquery:Dataset
name: my_dataset
properties:
datasetId: my_dataset
friendlyName: foo
description: bar
location: asia-northeast1
options:
dependsOn:
- ${permissions}
variables:
# Current project, referenced for its id and number.
project:
fn::invoke:
function: gcp:organizations:getProject
arguments: {}
The dataSourceId property set to “scheduled_query” tells BigQuery to execute SQL on the specified schedule. The params map contains the SQL query, the destination table name template, and the write disposition (WRITE_APPEND adds rows without truncating). The schedule property uses BigQuery’s English-like schedule syntax rather than cron; “first sunday of quarter 00:00” runs quarterly. The example query references a placeholder table named “tabl” — substitute your own table and predicate. Note the IAM member grants the BigQuery Data Transfer service account permission to create tokens, which is required for the transfer to run.
Encrypt transferred data with customer-managed keys
Organizations with strict data governance requirements often mandate customer-managed encryption keys (CMEK) for data at rest.
// Example: scheduled query whose output is encrypted with a
// customer-managed KMS key (CMEK) — TypeScript.
import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";
const project = gcp.organizations.getProject({});
// Token-creator grant for the Data Transfer service agent (required).
const permissions = new gcp.projects.IAMMember("permissions", {
project: project.then(project => project.projectId),
role: "roles/iam.serviceAccountTokenCreator",
member: project.then(project => `serviceAccount:service-${project.number}@gcp-sa-bigquerydatatransfer.iam.gserviceaccount.com`),
});
// Destination dataset for the scheduled query's output table.
const myDataset = new gcp.bigquery.Dataset("my_dataset", {
datasetId: "example_dataset",
friendlyName: "foo",
description: "bar",
location: "asia-northeast1",
}, {
dependsOn: [permissions],
});
// KMS key ring and crypto key used to encrypt the transferred data.
// NOTE(review): the key ring is in "us" while the dataset is in
// asia-northeast1 — confirm the key location fits your data policy.
const keyRing = new gcp.kms.KeyRing("key_ring", {
name: "example-keyring",
location: "us",
});
const cryptoKey = new gcp.kms.CryptoKey("crypto_key", {
name: "example-key",
keyRing: keyRing.id,
});
// Same scheduled-query pattern as the basic example, plus CMEK via
// encryptionConfiguration.
const queryConfigCmek = new gcp.bigquery.DataTransferConfig("query_config_cmek", {
displayName: "display-name",
location: "asia-northeast1",
dataSourceId: "scheduled_query",
schedule: "first sunday of quarter 00:00",
destinationDatasetId: myDataset.datasetId,
params: {
destination_table_name_template: "my_table",
write_disposition: "WRITE_APPEND",
query: "SELECT name FROM tabl WHERE x = 'y'",
},
encryptionConfiguration: {
kmsKeyName: cryptoKey.id, // destination data is encrypted with this key
},
}, {
dependsOn: [permissions],
});
# Example: scheduled query with customer-managed encryption (Python).
import pulumi
import pulumi_gcp as gcp
project = gcp.organizations.get_project()
# Token-creator grant for the Data Transfer service agent (required).
permissions = gcp.projects.IAMMember("permissions",
project=project.project_id,
role="roles/iam.serviceAccountTokenCreator",
member=f"serviceAccount:service-{project.number}@gcp-sa-bigquerydatatransfer.iam.gserviceaccount.com")
# Destination dataset for the scheduled query's output table.
my_dataset = gcp.bigquery.Dataset("my_dataset",
dataset_id="example_dataset",
friendly_name="foo",
description="bar",
location="asia-northeast1",
opts = pulumi.ResourceOptions(depends_on=[permissions]))
# KMS key ring and crypto key used to encrypt the transferred data.
key_ring = gcp.kms.KeyRing("key_ring",
name="example-keyring",
location="us")
crypto_key = gcp.kms.CryptoKey("crypto_key",
name="example-key",
key_ring=key_ring.id)
# Same scheduled-query pattern as the basic example, plus CMEK.
query_config_cmek = gcp.bigquery.DataTransferConfig("query_config_cmek",
display_name="display-name",
location="asia-northeast1",
data_source_id="scheduled_query",
schedule="first sunday of quarter 00:00",
destination_dataset_id=my_dataset.dataset_id,
params={
"destination_table_name_template": "my_table",
"write_disposition": "WRITE_APPEND",
"query": "SELECT name FROM tabl WHERE x = 'y'",
},
encryption_configuration={
"kms_key_name": crypto_key.id,  # output is encrypted with this key
},
opts = pulumi.ResourceOptions(depends_on=[permissions]))
// Example: scheduled query with customer-managed encryption (Go).
package main
import (
"github.com/pulumi/pulumi-gcp/sdk/v9/go/gcp/bigquery"
"github.com/pulumi/pulumi-gcp/sdk/v9/go/gcp/kms"
"github.com/pulumi/pulumi-gcp/sdk/v9/go/gcp/organizations"
"github.com/pulumi/pulumi-gcp/sdk/v9/go/gcp/projects"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
project, err := organizations.LookupProject(ctx, &organizations.LookupProjectArgs{}, nil)
if err != nil {
return err
}
// Token-creator grant for the Data Transfer service agent (required).
permissions, err := projects.NewIAMMember(ctx, "permissions", &projects.IAMMemberArgs{
Project: pulumi.String(project.ProjectId),
Role: pulumi.String("roles/iam.serviceAccountTokenCreator"),
Member: pulumi.Sprintf("serviceAccount:service-%v@gcp-sa-bigquerydatatransfer.iam.gserviceaccount.com", project.Number),
})
if err != nil {
return err
}
// Destination dataset for the scheduled query's output table.
myDataset, err := bigquery.NewDataset(ctx, "my_dataset", &bigquery.DatasetArgs{
DatasetId: pulumi.String("example_dataset"),
FriendlyName: pulumi.String("foo"),
Description: pulumi.String("bar"),
Location: pulumi.String("asia-northeast1"),
}, pulumi.DependsOn([]pulumi.Resource{
permissions,
}))
if err != nil {
return err
}
// KMS key ring and crypto key used to encrypt the transferred data.
keyRing, err := kms.NewKeyRing(ctx, "key_ring", &kms.KeyRingArgs{
Name: pulumi.String("example-keyring"),
Location: pulumi.String("us"),
})
if err != nil {
return err
}
cryptoKey, err := kms.NewCryptoKey(ctx, "crypto_key", &kms.CryptoKeyArgs{
Name: pulumi.String("example-key"),
KeyRing: keyRing.ID(),
})
if err != nil {
return err
}
// Same scheduled-query pattern as the basic example, plus CMEK via
// EncryptionConfiguration.
_, err = bigquery.NewDataTransferConfig(ctx, "query_config_cmek", &bigquery.DataTransferConfigArgs{
DisplayName: pulumi.String("display-name"),
Location: pulumi.String("asia-northeast1"),
DataSourceId: pulumi.String("scheduled_query"),
Schedule: pulumi.String("first sunday of quarter 00:00"),
DestinationDatasetId: myDataset.DatasetId,
Params: pulumi.StringMap{
"destination_table_name_template": pulumi.String("my_table"),
"write_disposition": pulumi.String("WRITE_APPEND"),
"query": pulumi.String("SELECT name FROM tabl WHERE x = 'y'"),
},
EncryptionConfiguration: &bigquery.DataTransferConfigEncryptionConfigurationArgs{
KmsKeyName: cryptoKey.ID(),
},
}, pulumi.DependsOn([]pulumi.Resource{
permissions,
}))
if err != nil {
return err
}
return nil
})
}
// Example: scheduled query with customer-managed encryption (C#).
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;
return await Deployment.RunAsync(() =>
{
var project = Gcp.Organizations.GetProject.Invoke();
// Token-creator grant for the Data Transfer service agent (required).
var permissions = new Gcp.Projects.IAMMember("permissions", new()
{
Project = project.Apply(getProjectResult => getProjectResult.ProjectId),
Role = "roles/iam.serviceAccountTokenCreator",
Member = $"serviceAccount:service-{project.Apply(getProjectResult => getProjectResult.Number)}@gcp-sa-bigquerydatatransfer.iam.gserviceaccount.com",
});
// Destination dataset for the scheduled query's output table.
var myDataset = new Gcp.BigQuery.Dataset("my_dataset", new()
{
DatasetId = "example_dataset",
FriendlyName = "foo",
Description = "bar",
Location = "asia-northeast1",
}, new CustomResourceOptions
{
DependsOn =
{
permissions,
},
});
// KMS key ring and crypto key used to encrypt the transferred data.
var keyRing = new Gcp.Kms.KeyRing("key_ring", new()
{
Name = "example-keyring",
Location = "us",
});
var cryptoKey = new Gcp.Kms.CryptoKey("crypto_key", new()
{
Name = "example-key",
KeyRing = keyRing.Id,
});
// Same scheduled-query pattern as the basic example, plus CMEK via
// EncryptionConfiguration.
var queryConfigCmek = new Gcp.BigQuery.DataTransferConfig("query_config_cmek", new()
{
DisplayName = "display-name",
Location = "asia-northeast1",
DataSourceId = "scheduled_query",
Schedule = "first sunday of quarter 00:00",
DestinationDatasetId = myDataset.DatasetId,
Params =
{
{ "destination_table_name_template", "my_table" },
{ "write_disposition", "WRITE_APPEND" },
{ "query", "SELECT name FROM tabl WHERE x = 'y'" },
},
EncryptionConfiguration = new Gcp.BigQuery.Inputs.DataTransferConfigEncryptionConfigurationArgs
{
KmsKeyName = cryptoKey.Id, // output is encrypted with this key
},
}, new CustomResourceOptions
{
DependsOn =
{
permissions,
},
});
});
// Example: scheduled query with customer-managed encryption (Java).
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.organizations.OrganizationsFunctions;
import com.pulumi.gcp.organizations.inputs.GetProjectArgs;
import com.pulumi.gcp.projects.IAMMember;
import com.pulumi.gcp.projects.IAMMemberArgs;
import com.pulumi.gcp.bigquery.Dataset;
import com.pulumi.gcp.bigquery.DatasetArgs;
import com.pulumi.gcp.kms.KeyRing;
import com.pulumi.gcp.kms.KeyRingArgs;
import com.pulumi.gcp.kms.CryptoKey;
import com.pulumi.gcp.kms.CryptoKeyArgs;
import com.pulumi.gcp.bigquery.DataTransferConfig;
import com.pulumi.gcp.bigquery.DataTransferConfigArgs;
import com.pulumi.gcp.bigquery.inputs.DataTransferConfigEncryptionConfigurationArgs;
import com.pulumi.resources.CustomResourceOptions;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
final var project = OrganizationsFunctions.getProject(GetProjectArgs.builder()
.build());
// Token-creator grant for the Data Transfer service agent (required).
var permissions = new IAMMember("permissions", IAMMemberArgs.builder()
.project(project.projectId())
.role("roles/iam.serviceAccountTokenCreator")
.member(String.format("serviceAccount:service-%s@gcp-sa-bigquerydatatransfer.iam.gserviceaccount.com", project.number()))
.build());
// Destination dataset for the scheduled query's output table.
var myDataset = new Dataset("myDataset", DatasetArgs.builder()
.datasetId("example_dataset")
.friendlyName("foo")
.description("bar")
.location("asia-northeast1")
.build(), CustomResourceOptions.builder()
.dependsOn(permissions)
.build());
// KMS key ring and crypto key used to encrypt the transferred data.
var keyRing = new KeyRing("keyRing", KeyRingArgs.builder()
.name("example-keyring")
.location("us")
.build());
var cryptoKey = new CryptoKey("cryptoKey", CryptoKeyArgs.builder()
.name("example-key")
.keyRing(keyRing.id())
.build());
// Same scheduled-query pattern as the basic example, plus CMEK via
// encryptionConfiguration.
var queryConfigCmek = new DataTransferConfig("queryConfigCmek", DataTransferConfigArgs.builder()
.displayName("display-name")
.location("asia-northeast1")
.dataSourceId("scheduled_query")
.schedule("first sunday of quarter 00:00")
.destinationDatasetId(myDataset.datasetId())
.params(Map.ofEntries(
Map.entry("destination_table_name_template", "my_table"),
Map.entry("write_disposition", "WRITE_APPEND"),
Map.entry("query", "SELECT name FROM tabl WHERE x = 'y'")
))
.encryptionConfiguration(DataTransferConfigEncryptionConfigurationArgs.builder()
.kmsKeyName(cryptoKey.id())
.build())
.build(), CustomResourceOptions.builder()
.dependsOn(permissions)
.build());
}
}
# Example: scheduled query with customer-managed encryption (YAML).
resources:
# Token-creator grant for the Data Transfer service agent (required).
permissions:
type: gcp:projects:IAMMember
properties:
project: ${project.projectId}
role: roles/iam.serviceAccountTokenCreator
member: serviceAccount:service-${project.number}@gcp-sa-bigquerydatatransfer.iam.gserviceaccount.com
# Same scheduled-query pattern as the basic example, plus CMEK via
# encryptionConfiguration.
queryConfigCmek:
type: gcp:bigquery:DataTransferConfig
name: query_config_cmek
properties:
displayName: display-name
location: asia-northeast1
dataSourceId: scheduled_query
schedule: first sunday of quarter 00:00
destinationDatasetId: ${myDataset.datasetId}
params:
destination_table_name_template: my_table
write_disposition: WRITE_APPEND
query: SELECT name FROM tabl WHERE x = 'y'
encryptionConfiguration:
kmsKeyName: ${cryptoKey.id} # output is encrypted with this key
options:
dependsOn:
- ${permissions}
# Destination dataset for the scheduled query's output table.
myDataset:
type: gcp:bigquery:Dataset
name: my_dataset
properties:
datasetId: example_dataset
friendlyName: foo
description: bar
location: asia-northeast1
options:
dependsOn:
- ${permissions}
# KMS crypto key (and its key ring below) used for CMEK.
cryptoKey:
type: gcp:kms:CryptoKey
name: crypto_key
properties:
name: example-key
keyRing: ${keyRing.id}
keyRing:
type: gcp:kms:KeyRing
name: key_ring
properties:
name: example-keyring
location: us
variables:
# Current project, referenced for its id and number.
project:
fn::invoke:
function: gcp:organizations:getProject
arguments: {}
The encryptionConfiguration block references a Cloud KMS CryptoKey by ID. BigQuery encrypts data written to the destination dataset using this key. The BigQuery service account needs encrypt/decrypt permissions on the KMS key. This extends the basic scheduled query pattern with encryption controls.
Import Salesforce data into BigQuery tables
Sales and marketing teams need to analyze Salesforce data alongside other business metrics in BigQuery.
// Example: import Salesforce data via the third-party connector (TypeScript).
import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";
const project = gcp.organizations.getProject({});
// Destination dataset for the transferred Salesforce objects.
const myDataset = new gcp.bigquery.Dataset("my_dataset", {
datasetId: "my_dataset",
description: "My dataset",
location: "asia-northeast1",
});
// "salesforce" activates the third-party connector; params carry the
// OAuth credentials and the list of objects to transfer.
const salesforceConfig = new gcp.bigquery.DataTransferConfig("salesforce_config", {
displayName: "my-salesforce-config",
location: "asia-northeast1",
dataSourceId: "salesforce",
schedule: "first sunday of quarter 00:00",
destinationDatasetId: myDataset.datasetId,
params: {
// Placeholder credentials — replace with real OAuth values; prefer
// sensitiveParams for secrets so they are hidden from plan output.
"connector.authentication.oauth.clientId": "client-id",
"connector.authentication.oauth.clientSecret": "client-secret",
"connector.authentication.oauth.myDomain": "MyDomainName",
assets: "[\"asset-a\",\"asset-b\"]", // JSON-encoded list of Salesforce objects
},
});
# Example: import Salesforce data via the third-party connector (Python).
import pulumi
import pulumi_gcp as gcp
project = gcp.organizations.get_project()
# Destination dataset for the transferred Salesforce objects.
my_dataset = gcp.bigquery.Dataset("my_dataset",
dataset_id="my_dataset",
description="My dataset",
location="asia-northeast1")
# "salesforce" activates the third-party connector; params carry the
# OAuth credentials (placeholders — replace with real values).
salesforce_config = gcp.bigquery.DataTransferConfig("salesforce_config",
display_name="my-salesforce-config",
location="asia-northeast1",
data_source_id="salesforce",
schedule="first sunday of quarter 00:00",
destination_dataset_id=my_dataset.dataset_id,
params={
"connector.authentication.oauth.clientId": "client-id",
"connector.authentication.oauth.clientSecret": "client-secret",
"connector.authentication.oauth.myDomain": "MyDomainName",
"assets": "[\"asset-a\",\"asset-b\"]",  # JSON-encoded list of objects
})
// Example: import Salesforce data via the third-party connector (Go).
package main
import (
"github.com/pulumi/pulumi-gcp/sdk/v9/go/gcp/bigquery"
"github.com/pulumi/pulumi-gcp/sdk/v9/go/gcp/organizations"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
// Project lookup kept for parity with the other examples; its
// result is unused here.
_, err := organizations.LookupProject(ctx, &organizations.LookupProjectArgs{}, nil)
if err != nil {
return err
}
// Destination dataset for the transferred Salesforce objects.
myDataset, err := bigquery.NewDataset(ctx, "my_dataset", &bigquery.DatasetArgs{
DatasetId: pulumi.String("my_dataset"),
Description: pulumi.String("My dataset"),
Location: pulumi.String("asia-northeast1"),
})
if err != nil {
return err
}
// "salesforce" activates the third-party connector; Params carry the
// OAuth credentials (placeholders — replace with real values).
_, err = bigquery.NewDataTransferConfig(ctx, "salesforce_config", &bigquery.DataTransferConfigArgs{
DisplayName: pulumi.String("my-salesforce-config"),
Location: pulumi.String("asia-northeast1"),
DataSourceId: pulumi.String("salesforce"),
Schedule: pulumi.String("first sunday of quarter 00:00"),
DestinationDatasetId: myDataset.DatasetId,
Params: pulumi.StringMap{
"connector.authentication.oauth.clientId": pulumi.String("client-id"),
"connector.authentication.oauth.clientSecret": pulumi.String("client-secret"),
"connector.authentication.oauth.myDomain": pulumi.String("MyDomainName"),
"assets": pulumi.String("[\"asset-a\",\"asset-b\"]"),
},
})
if err != nil {
return err
}
return nil
})
}
// Example: import Salesforce data via the third-party connector (C#).
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;
return await Deployment.RunAsync(() =>
{
var project = Gcp.Organizations.GetProject.Invoke();
// Destination dataset for the transferred Salesforce objects.
var myDataset = new Gcp.BigQuery.Dataset("my_dataset", new()
{
DatasetId = "my_dataset",
Description = "My dataset",
Location = "asia-northeast1",
});
// "salesforce" activates the third-party connector; Params carry the
// OAuth credentials (placeholders — replace with real values).
var salesforceConfig = new Gcp.BigQuery.DataTransferConfig("salesforce_config", new()
{
DisplayName = "my-salesforce-config",
Location = "asia-northeast1",
DataSourceId = "salesforce",
Schedule = "first sunday of quarter 00:00",
DestinationDatasetId = myDataset.DatasetId,
Params =
{
{ "connector.authentication.oauth.clientId", "client-id" },
{ "connector.authentication.oauth.clientSecret", "client-secret" },
{ "connector.authentication.oauth.myDomain", "MyDomainName" },
{ "assets", "[\"asset-a\",\"asset-b\"]" }, // JSON-encoded list of objects
},
});
});
// Example: import Salesforce data via the third-party connector (Java).
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.organizations.OrganizationsFunctions;
import com.pulumi.gcp.organizations.inputs.GetProjectArgs;
import com.pulumi.gcp.bigquery.Dataset;
import com.pulumi.gcp.bigquery.DatasetArgs;
import com.pulumi.gcp.bigquery.DataTransferConfig;
import com.pulumi.gcp.bigquery.DataTransferConfigArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
final var project = OrganizationsFunctions.getProject(GetProjectArgs.builder()
.build());
// Destination dataset for the transferred Salesforce objects.
var myDataset = new Dataset("myDataset", DatasetArgs.builder()
.datasetId("my_dataset")
.description("My dataset")
.location("asia-northeast1")
.build());
// "salesforce" activates the third-party connector; params carry the
// OAuth credentials (placeholders — replace with real values).
var salesforceConfig = new DataTransferConfig("salesforceConfig", DataTransferConfigArgs.builder()
.displayName("my-salesforce-config")
.location("asia-northeast1")
.dataSourceId("salesforce")
.schedule("first sunday of quarter 00:00")
.destinationDatasetId(myDataset.datasetId())
.params(Map.ofEntries(
Map.entry("connector.authentication.oauth.clientId", "client-id"),
Map.entry("connector.authentication.oauth.clientSecret", "client-secret"),
Map.entry("connector.authentication.oauth.myDomain", "MyDomainName"),
Map.entry("assets", "[\"asset-a\",\"asset-b\"]")
))
.build());
}
}
# Example: import Salesforce data via the third-party connector (YAML).
resources:
# Destination dataset for the transferred Salesforce objects.
myDataset:
type: gcp:bigquery:Dataset
name: my_dataset
properties:
datasetId: my_dataset
description: My dataset
location: asia-northeast1
# "salesforce" activates the third-party connector; params carry the
# OAuth credentials (placeholders — replace with real values).
salesforceConfig:
type: gcp:bigquery:DataTransferConfig
name: salesforce_config
properties:
displayName: my-salesforce-config
location: asia-northeast1
dataSourceId: salesforce
schedule: first sunday of quarter 00:00
destinationDatasetId: ${myDataset.datasetId}
params:
connector.authentication.oauth.clientId: client-id
connector.authentication.oauth.clientSecret: client-secret
connector.authentication.oauth.myDomain: MyDomainName
assets: '["asset-a","asset-b"]' # JSON-encoded list of objects
variables:
# Current project lookup, kept for parity with the other examples.
project:
fn::invoke:
function: gcp:organizations:getProject
arguments: {}
The dataSourceId property set to “salesforce” activates the Salesforce connector. The params map contains OAuth credentials (clientId, clientSecret) and your Salesforce domain name. The assets parameter lists which Salesforce objects to transfer (e.g., “asset-a”, “asset-b”). You must configure OAuth credentials in Salesforce and replace MyDomainName with your actual Salesforce instance domain.
Beyond these examples
These snippets focus on specific Data Transfer configuration features: scheduled query execution, customer-managed encryption, and third-party data source connectors. They’re intentionally minimal rather than full data pipeline solutions.
The examples may reference pre-existing infrastructure such as BigQuery datasets (created inline but required before transfer config), IAM service account permissions for BigQuery Data Transfer, Cloud KMS keys and permissions (for CMEK example), and Salesforce OAuth credentials (for Salesforce example). They focus on configuring the transfer rather than provisioning everything around it.
To keep things focused, common transfer patterns are omitted, including:
- Pub/Sub notifications for transfer completion (notificationPubsubTopic)
- Schedule customization options (scheduleOptions)
- Service account impersonation (serviceAccountName)
- Sensitive parameter handling (sensitiveParams for secrets)
- Transfer disabling and data refresh windows
These omissions are intentional: the goal is to illustrate how each transfer feature is wired, not provide drop-in data pipeline modules. See the BigQuery DataTransferConfig resource reference for all available configuration options.
Let's configure GCP BigQuery Data Transfer
Get started with Pulumi Cloud, then follow our quick setup guide to deploy this infrastructure.
Try Pulumi Cloud for FREE.
Frequently Asked Questions
Configuration & Setup
- The BigQuery Data Transfer service agent needs the roles/iam.serviceAccountTokenCreator permission. Create an IAMMember resource granting this role to serviceAccount:service-{PROJECT_NUMBER}@gcp-sa-bigquerydatatransfer.iam.gserviceaccount.com and use dependsOn to ensure it is created before the transfer config.
- dataSourceId, location, and project are immutable. Changing these requires recreating the resource.
- To run transfers as a specific identity, set serviceAccountName to the service account email. You must have permissions to act as that service account.
Parameters & Credentials
- Use params for regular configuration and sensitiveParams for secrets/passwords that should be hidden from plan output. Credentials cannot be specified in both locations or an error will occur.
- Some params cannot be updated due to API limitations. If you encounter update errors, force recreation of the resource.
- Write-only values such as sensitive_params.secret_access_key_wo are not stored in Pulumi state by design.
Scheduling & Execution
- The schedule property accepts expressions such as first sunday of quarter 00:00 or every wed,fri of jan,jun 13:15. The minimum interval depends on your data source.
- Set disabled to true to prevent scheduled runs without removing the configuration.
Security & Encryption
- To use customer-managed encryption, set encryptionConfiguration with kmsKeyName pointing to your KMS crypto key ID.
Using a different cloud?
Explore analytics guides for other cloud providers: