The gcp:cloudfunctionsv2/function:Function resource, part of the Pulumi GCP provider, defines a Cloud Function that executes code in response to HTTP requests or events from other Google Cloud services. This guide focuses on three capabilities: HTTP and event-driven deployment, Secret Manager integration, and VPC networking for private resource access.
Functions require Cloud Storage buckets for source code and may reference Pub/Sub topics, Secret Manager secrets, or VPC networks. The examples are intentionally small. Combine them with your own IAM roles, event sources, and networking infrastructure.
Deploy a function from Cloud Storage source
Most deployments package code into a ZIP file, upload it to Cloud Storage, and configure the runtime and entry point.
// TypeScript: HTTP-triggered Cloud Functions (2nd gen) function deployed
// from a ZIP of source code stored in Cloud Storage.
import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";
const project = "my-project-name";
// Bucket that holds the function's source archive.
const bucket = new gcp.storage.Bucket("bucket", {
name: `${project}-gcf-source`,
location: "US",
uniformBucketLevelAccess: true,
});
// Upload the zipped source; the path is relative to the Pulumi program.
const object = new gcp.storage.BucketObject("object", {
name: "function-source.zip",
bucket: bucket.name,
source: new pulumi.asset.FileAsset("function-source.zip"),
});
// Leading underscore avoids the reserved word `function`.
const _function = new gcp.cloudfunctionsv2.Function("function", {
name: "function-v2",
location: "us-central1",
description: "a new function",
buildConfig: {
runtime: "nodejs20",
entryPoint: "helloHttp", // exported handler name inside the source ZIP
source: {
storageSource: {
bucket: bucket.name,
object: object.name,
},
},
},
// No eventTrigger, so the function serves HTTP at its generated URL.
serviceConfig: {
maxInstanceCount: 1,
availableMemory: "256M",
timeoutSeconds: 60,
},
});
# Python: HTTP-triggered Cloud Functions (2nd gen) function deployed
# from a ZIP of source code stored in Cloud Storage.
import pulumi
import pulumi_gcp as gcp
project = "my-project-name"
# Bucket that holds the function's source archive.
bucket = gcp.storage.Bucket("bucket",
name=f"{project}-gcf-source",
location="US",
uniform_bucket_level_access=True)
# Upload the zipped source; the path is relative to the Pulumi program.
object = gcp.storage.BucketObject("object",
name="function-source.zip",
bucket=bucket.name,
source=pulumi.FileAsset("function-source.zip"))
# No event_trigger, so the function serves HTTP at its generated URL.
function = gcp.cloudfunctionsv2.Function("function",
name="function-v2",
location="us-central1",
description="a new function",
build_config={
"runtime": "nodejs20",
"entry_point": "helloHttp",  # exported handler name inside the source ZIP
"source": {
"storage_source": {
"bucket": bucket.name,
"object": object.name,
},
},
},
service_config={
"max_instance_count": 1,
"available_memory": "256M",
"timeout_seconds": 60,
})
// Go: HTTP-triggered Cloud Functions (2nd gen) function deployed
// from a ZIP of source code stored in Cloud Storage.
package main
import (
"github.com/pulumi/pulumi-gcp/sdk/v9/go/gcp/cloudfunctionsv2"
"github.com/pulumi/pulumi-gcp/sdk/v9/go/gcp/storage"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
project := "my-project-name"
// Bucket that holds the function's source archive.
bucket, err := storage.NewBucket(ctx, "bucket", &storage.BucketArgs{
Name: pulumi.Sprintf("%v-gcf-source", project),
Location: pulumi.String("US"),
UniformBucketLevelAccess: pulumi.Bool(true),
})
if err != nil {
return err
}
// Upload the zipped source; the path is relative to the Pulumi program.
object, err := storage.NewBucketObject(ctx, "object", &storage.BucketObjectArgs{
Name: pulumi.String("function-source.zip"),
Bucket: bucket.Name,
Source: pulumi.NewFileAsset("function-source.zip"),
})
if err != nil {
return err
}
// No EventTrigger, so the function serves HTTP at its generated URL.
_, err = cloudfunctionsv2.NewFunction(ctx, "function", &cloudfunctionsv2.FunctionArgs{
Name: pulumi.String("function-v2"),
Location: pulumi.String("us-central1"),
Description: pulumi.String("a new function"),
BuildConfig: &cloudfunctionsv2.FunctionBuildConfigArgs{
Runtime: pulumi.String("nodejs20"),
EntryPoint: pulumi.String("helloHttp"), // exported handler name inside the source ZIP
Source: &cloudfunctionsv2.FunctionBuildConfigSourceArgs{
StorageSource: &cloudfunctionsv2.FunctionBuildConfigSourceStorageSourceArgs{
Bucket: bucket.Name,
Object: object.Name,
},
},
},
ServiceConfig: &cloudfunctionsv2.FunctionServiceConfigArgs{
MaxInstanceCount: pulumi.Int(1),
AvailableMemory: pulumi.String("256M"),
TimeoutSeconds: pulumi.Int(60),
},
})
if err != nil {
return err
}
return nil
})
}
// C#: HTTP-triggered Cloud Functions (2nd gen) function deployed
// from a ZIP of source code stored in Cloud Storage.
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;
return await Deployment.RunAsync(() =>
{
var project = "my-project-name";
// Bucket that holds the function's source archive.
var bucket = new Gcp.Storage.Bucket("bucket", new()
{
Name = $"{project}-gcf-source",
Location = "US",
UniformBucketLevelAccess = true,
});
// Upload the zipped source; the path is relative to the Pulumi program.
// `@object` escapes the C# keyword `object`.
var @object = new Gcp.Storage.BucketObject("object", new()
{
Name = "function-source.zip",
Bucket = bucket.Name,
Source = new FileAsset("function-source.zip"),
});
// No EventTrigger, so the function serves HTTP at its generated URL.
var function = new Gcp.CloudFunctionsV2.Function("function", new()
{
Name = "function-v2",
Location = "us-central1",
Description = "a new function",
BuildConfig = new Gcp.CloudFunctionsV2.Inputs.FunctionBuildConfigArgs
{
Runtime = "nodejs20",
EntryPoint = "helloHttp", // exported handler name inside the source ZIP
Source = new Gcp.CloudFunctionsV2.Inputs.FunctionBuildConfigSourceArgs
{
StorageSource = new Gcp.CloudFunctionsV2.Inputs.FunctionBuildConfigSourceStorageSourceArgs
{
Bucket = bucket.Name,
Object = @object.Name,
},
},
},
ServiceConfig = new Gcp.CloudFunctionsV2.Inputs.FunctionServiceConfigArgs
{
MaxInstanceCount = 1,
AvailableMemory = "256M",
TimeoutSeconds = 60,
},
});
});
// Java: HTTP-triggered Cloud Functions (2nd gen) function deployed
// from a ZIP of source code stored in Cloud Storage.
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.storage.Bucket;
import com.pulumi.gcp.storage.BucketArgs;
import com.pulumi.gcp.storage.BucketObject;
import com.pulumi.gcp.storage.BucketObjectArgs;
import com.pulumi.gcp.cloudfunctionsv2.Function;
import com.pulumi.gcp.cloudfunctionsv2.FunctionArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionBuildConfigArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionBuildConfigSourceArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionBuildConfigSourceStorageSourceArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionServiceConfigArgs;
import com.pulumi.asset.FileAsset;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
final var project = "my-project-name";
// Bucket that holds the function's source archive.
var bucket = new Bucket("bucket", BucketArgs.builder()
.name(String.format("%s-gcf-source", project))
.location("US")
.uniformBucketLevelAccess(true)
.build());
// Upload the zipped source; the path is relative to the Pulumi program.
var object = new BucketObject("object", BucketObjectArgs.builder()
.name("function-source.zip")
.bucket(bucket.name())
.source(new FileAsset("function-source.zip"))
.build());
// No eventTrigger, so the function serves HTTP at its generated URL.
var function = new Function("function", FunctionArgs.builder()
.name("function-v2")
.location("us-central1")
.description("a new function")
.buildConfig(FunctionBuildConfigArgs.builder()
.runtime("nodejs20")
.entryPoint("helloHttp") // exported handler name inside the source ZIP
.source(FunctionBuildConfigSourceArgs.builder()
.storageSource(FunctionBuildConfigSourceStorageSourceArgs.builder()
.bucket(bucket.name())
.object(object.name())
.build())
.build())
.build())
.serviceConfig(FunctionServiceConfigArgs.builder()
.maxInstanceCount(1)
.availableMemory("256M")
.timeoutSeconds(60)
.build())
.build());
}
}
# YAML: HTTP-triggered Cloud Functions (2nd gen) function deployed
# from a ZIP of source code stored in Cloud Storage.
resources:
# Bucket that holds the function's source archive.
bucket:
type: gcp:storage:Bucket
properties:
name: ${project}-gcf-source
location: US
uniformBucketLevelAccess: true
# The zipped source, uploaded from the local working directory.
object:
type: gcp:storage:BucketObject
properties:
name: function-source.zip
bucket: ${bucket.name}
source:
fn::FileAsset: function-source.zip
# No eventTrigger, so the function serves HTTP at its generated URL.
function:
type: gcp:cloudfunctionsv2:Function
properties:
name: function-v2
location: us-central1
description: a new function
buildConfig:
runtime: nodejs20
entryPoint: helloHttp # exported handler name inside the source ZIP
source:
storageSource:
bucket: ${bucket.name}
object: ${object.name}
serviceConfig:
maxInstanceCount: 1
availableMemory: 256M
timeoutSeconds: 60
variables:
project: my-project-name
The buildConfig block specifies the runtime (Node.js 20), entry point function name, and source location in Cloud Storage. The serviceConfig block controls scaling and resource limits: maxInstanceCount caps concurrent instances, availableMemory sets memory per instance, and timeoutSeconds defines execution limits. Without an eventTrigger, the function responds to HTTP requests at the generated URL.
Trigger functions from Pub/Sub events
Event-driven architectures use Pub/Sub to decouple services, allowing functions to process messages asynchronously.
// TypeScript: Pub/Sub-triggered Cloud Functions (2nd gen) function running
// as a dedicated service account, with an event trigger that retries.
import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";
const project = "my-project-name";
// Runtime identity the function executes as.
const account = new gcp.serviceaccount.Account("account", {
accountId: "gcf-sa",
displayName: "Test Service Account",
});
// Topic whose messages invoke the function.
const topic = new gcp.pubsub.Topic("topic", {name: "functions2-topic"});
const bucket = new gcp.storage.Bucket("bucket", {
name: `${project}-gcf-source`,
location: "US",
uniformBucketLevelAccess: true,
});
const object = new gcp.storage.BucketObject("object", {
name: "function-source.zip",
bucket: bucket.name,
source: new pulumi.asset.FileAsset("function-source.zip"),
});
const _function = new gcp.cloudfunctionsv2.Function("function", {
name: "gcf-function",
location: "us-central1",
description: "a new function",
buildConfig: {
runtime: "nodejs20",
entryPoint: "helloPubSub",
// Visible to the build step only, not at runtime.
environmentVariables: {
BUILD_CONFIG_TEST: "build_test",
},
source: {
storageSource: {
bucket: bucket.name,
object: object.name,
},
},
},
serviceConfig: {
maxInstanceCount: 3,
minInstanceCount: 1, // keep one instance warm to reduce cold starts
availableMemory: "4Gi",
timeoutSeconds: 60,
maxInstanceRequestConcurrency: 80,
availableCpu: "4",
environmentVariables: {
SERVICE_CONFIG_TEST: "config_test",
SERVICE_CONFIG_DIFF_TEST: account.email,
},
ingressSettings: "ALLOW_INTERNAL_ONLY", // block traffic from outside the project/VPC
allTrafficOnLatestRevision: true,
serviceAccountEmail: account.email,
},
eventTrigger: {
triggerRegion: "us-central1",
eventType: "google.cloud.pubsub.topic.v1.messagePublished",
pubsubTopic: topic.id,
retryPolicy: "RETRY_POLICY_RETRY", // redeliver events on failure
},
});
# Python: Pub/Sub-triggered Cloud Functions (2nd gen) function running
# as a dedicated service account, with an event trigger that retries.
import pulumi
import pulumi_gcp as gcp
project = "my-project-name"
# Runtime identity the function executes as.
account = gcp.serviceaccount.Account("account",
account_id="gcf-sa",
display_name="Test Service Account")
# Topic whose messages invoke the function.
topic = gcp.pubsub.Topic("topic", name="functions2-topic")
bucket = gcp.storage.Bucket("bucket",
name=f"{project}-gcf-source",
location="US",
uniform_bucket_level_access=True)
object = gcp.storage.BucketObject("object",
name="function-source.zip",
bucket=bucket.name,
source=pulumi.FileAsset("function-source.zip"))
function = gcp.cloudfunctionsv2.Function("function",
name="gcf-function",
location="us-central1",
description="a new function",
build_config={
"runtime": "nodejs20",
"entry_point": "helloPubSub",
# Visible to the build step only, not at runtime.
"environment_variables": {
"BUILD_CONFIG_TEST": "build_test",
},
"source": {
"storage_source": {
"bucket": bucket.name,
"object": object.name,
},
},
},
service_config={
"max_instance_count": 3,
"min_instance_count": 1,  # keep one instance warm to reduce cold starts
"available_memory": "4Gi",
"timeout_seconds": 60,
"max_instance_request_concurrency": 80,
"available_cpu": "4",
"environment_variables": {
"SERVICE_CONFIG_TEST": "config_test",
"SERVICE_CONFIG_DIFF_TEST": account.email,
},
"ingress_settings": "ALLOW_INTERNAL_ONLY",  # block traffic from outside the project/VPC
"all_traffic_on_latest_revision": True,
"service_account_email": account.email,
},
event_trigger={
"trigger_region": "us-central1",
"event_type": "google.cloud.pubsub.topic.v1.messagePublished",
"pubsub_topic": topic.id,
"retry_policy": "RETRY_POLICY_RETRY",  # redeliver events on failure
})
// Go: Pub/Sub-triggered Cloud Functions (2nd gen) function running
// as a dedicated service account, with an event trigger that retries.
package main
import (
"github.com/pulumi/pulumi-gcp/sdk/v9/go/gcp/cloudfunctionsv2"
"github.com/pulumi/pulumi-gcp/sdk/v9/go/gcp/pubsub"
"github.com/pulumi/pulumi-gcp/sdk/v9/go/gcp/serviceaccount"
"github.com/pulumi/pulumi-gcp/sdk/v9/go/gcp/storage"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
project := "my-project-name"
// Runtime identity the function executes as.
account, err := serviceaccount.NewAccount(ctx, "account", &serviceaccount.AccountArgs{
AccountId: pulumi.String("gcf-sa"),
DisplayName: pulumi.String("Test Service Account"),
})
if err != nil {
return err
}
// Topic whose messages invoke the function.
topic, err := pubsub.NewTopic(ctx, "topic", &pubsub.TopicArgs{
Name: pulumi.String("functions2-topic"),
})
if err != nil {
return err
}
bucket, err := storage.NewBucket(ctx, "bucket", &storage.BucketArgs{
Name: pulumi.Sprintf("%v-gcf-source", project),
Location: pulumi.String("US"),
UniformBucketLevelAccess: pulumi.Bool(true),
})
if err != nil {
return err
}
object, err := storage.NewBucketObject(ctx, "object", &storage.BucketObjectArgs{
Name: pulumi.String("function-source.zip"),
Bucket: bucket.Name,
Source: pulumi.NewFileAsset("function-source.zip"),
})
if err != nil {
return err
}
_, err = cloudfunctionsv2.NewFunction(ctx, "function", &cloudfunctionsv2.FunctionArgs{
Name: pulumi.String("gcf-function"),
Location: pulumi.String("us-central1"),
Description: pulumi.String("a new function"),
BuildConfig: &cloudfunctionsv2.FunctionBuildConfigArgs{
Runtime: pulumi.String("nodejs20"),
EntryPoint: pulumi.String("helloPubSub"),
// Visible to the build step only, not at runtime.
EnvironmentVariables: pulumi.StringMap{
"BUILD_CONFIG_TEST": pulumi.String("build_test"),
},
Source: &cloudfunctionsv2.FunctionBuildConfigSourceArgs{
StorageSource: &cloudfunctionsv2.FunctionBuildConfigSourceStorageSourceArgs{
Bucket: bucket.Name,
Object: object.Name,
},
},
},
ServiceConfig: &cloudfunctionsv2.FunctionServiceConfigArgs{
MaxInstanceCount: pulumi.Int(3),
MinInstanceCount: pulumi.Int(1), // keep one instance warm to reduce cold starts
AvailableMemory: pulumi.String("4Gi"),
TimeoutSeconds: pulumi.Int(60),
MaxInstanceRequestConcurrency: pulumi.Int(80),
AvailableCpu: pulumi.String("4"),
EnvironmentVariables: pulumi.StringMap{
"SERVICE_CONFIG_TEST": pulumi.String("config_test"),
"SERVICE_CONFIG_DIFF_TEST": account.Email,
},
IngressSettings: pulumi.String("ALLOW_INTERNAL_ONLY"), // block traffic from outside the project/VPC
AllTrafficOnLatestRevision: pulumi.Bool(true),
ServiceAccountEmail: account.Email,
},
EventTrigger: &cloudfunctionsv2.FunctionEventTriggerArgs{
TriggerRegion: pulumi.String("us-central1"),
EventType: pulumi.String("google.cloud.pubsub.topic.v1.messagePublished"),
PubsubTopic: topic.ID(),
RetryPolicy: pulumi.String("RETRY_POLICY_RETRY"), // redeliver events on failure
},
})
if err != nil {
return err
}
return nil
})
}
// C#: Pub/Sub-triggered Cloud Functions (2nd gen) function running
// as a dedicated service account, with an event trigger that retries.
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;
return await Deployment.RunAsync(() =>
{
var project = "my-project-name";
// Runtime identity the function executes as.
var account = new Gcp.ServiceAccount.Account("account", new()
{
AccountId = "gcf-sa",
DisplayName = "Test Service Account",
});
// Topic whose messages invoke the function.
var topic = new Gcp.PubSub.Topic("topic", new()
{
Name = "functions2-topic",
});
var bucket = new Gcp.Storage.Bucket("bucket", new()
{
Name = $"{project}-gcf-source",
Location = "US",
UniformBucketLevelAccess = true,
});
var @object = new Gcp.Storage.BucketObject("object", new()
{
Name = "function-source.zip",
Bucket = bucket.Name,
Source = new FileAsset("function-source.zip"),
});
var function = new Gcp.CloudFunctionsV2.Function("function", new()
{
Name = "gcf-function",
Location = "us-central1",
Description = "a new function",
BuildConfig = new Gcp.CloudFunctionsV2.Inputs.FunctionBuildConfigArgs
{
Runtime = "nodejs20",
EntryPoint = "helloPubSub",
// Visible to the build step only, not at runtime.
EnvironmentVariables =
{
{ "BUILD_CONFIG_TEST", "build_test" },
},
Source = new Gcp.CloudFunctionsV2.Inputs.FunctionBuildConfigSourceArgs
{
StorageSource = new Gcp.CloudFunctionsV2.Inputs.FunctionBuildConfigSourceStorageSourceArgs
{
Bucket = bucket.Name,
Object = @object.Name,
},
},
},
ServiceConfig = new Gcp.CloudFunctionsV2.Inputs.FunctionServiceConfigArgs
{
MaxInstanceCount = 3,
MinInstanceCount = 1, // keep one instance warm to reduce cold starts
AvailableMemory = "4Gi",
TimeoutSeconds = 60,
MaxInstanceRequestConcurrency = 80,
AvailableCpu = "4",
EnvironmentVariables =
{
{ "SERVICE_CONFIG_TEST", "config_test" },
{ "SERVICE_CONFIG_DIFF_TEST", account.Email },
},
IngressSettings = "ALLOW_INTERNAL_ONLY", // block traffic from outside the project/VPC
AllTrafficOnLatestRevision = true,
ServiceAccountEmail = account.Email,
},
EventTrigger = new Gcp.CloudFunctionsV2.Inputs.FunctionEventTriggerArgs
{
TriggerRegion = "us-central1",
EventType = "google.cloud.pubsub.topic.v1.messagePublished",
PubsubTopic = topic.Id,
RetryPolicy = "RETRY_POLICY_RETRY", // redeliver events on failure
},
});
});
// Java: Pub/Sub-triggered Cloud Functions (2nd gen) function running
// as a dedicated service account, with an event trigger that retries.
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.serviceaccount.Account;
import com.pulumi.gcp.serviceaccount.AccountArgs;
import com.pulumi.gcp.pubsub.Topic;
import com.pulumi.gcp.pubsub.TopicArgs;
import com.pulumi.gcp.storage.Bucket;
import com.pulumi.gcp.storage.BucketArgs;
import com.pulumi.gcp.storage.BucketObject;
import com.pulumi.gcp.storage.BucketObjectArgs;
import com.pulumi.gcp.cloudfunctionsv2.Function;
import com.pulumi.gcp.cloudfunctionsv2.FunctionArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionBuildConfigArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionBuildConfigSourceArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionBuildConfigSourceStorageSourceArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionServiceConfigArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionEventTriggerArgs;
import com.pulumi.asset.FileAsset;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
final var project = "my-project-name";
// Runtime identity the function executes as.
var account = new Account("account", AccountArgs.builder()
.accountId("gcf-sa")
.displayName("Test Service Account")
.build());
// Topic whose messages invoke the function.
var topic = new Topic("topic", TopicArgs.builder()
.name("functions2-topic")
.build());
var bucket = new Bucket("bucket", BucketArgs.builder()
.name(String.format("%s-gcf-source", project))
.location("US")
.uniformBucketLevelAccess(true)
.build());
var object = new BucketObject("object", BucketObjectArgs.builder()
.name("function-source.zip")
.bucket(bucket.name())
.source(new FileAsset("function-source.zip"))
.build());
var function = new Function("function", FunctionArgs.builder()
.name("gcf-function")
.location("us-central1")
.description("a new function")
.buildConfig(FunctionBuildConfigArgs.builder()
.runtime("nodejs20")
.entryPoint("helloPubSub")
// Visible to the build step only, not at runtime.
.environmentVariables(Map.of("BUILD_CONFIG_TEST", "build_test"))
.source(FunctionBuildConfigSourceArgs.builder()
.storageSource(FunctionBuildConfigSourceStorageSourceArgs.builder()
.bucket(bucket.name())
.object(object.name())
.build())
.build())
.build())
.serviceConfig(FunctionServiceConfigArgs.builder()
.maxInstanceCount(3)
.minInstanceCount(1) // keep one instance warm to reduce cold starts
.availableMemory("4Gi")
.timeoutSeconds(60)
.maxInstanceRequestConcurrency(80)
.availableCpu("4")
.environmentVariables(Map.ofEntries(
Map.entry("SERVICE_CONFIG_TEST", "config_test"),
Map.entry("SERVICE_CONFIG_DIFF_TEST", account.email())
))
.ingressSettings("ALLOW_INTERNAL_ONLY") // block traffic from outside the project/VPC
.allTrafficOnLatestRevision(true)
.serviceAccountEmail(account.email())
.build())
.eventTrigger(FunctionEventTriggerArgs.builder()
.triggerRegion("us-central1")
.eventType("google.cloud.pubsub.topic.v1.messagePublished")
.pubsubTopic(topic.id())
.retryPolicy("RETRY_POLICY_RETRY") // redeliver events on failure
.build())
.build());
}
}
# YAML: Pub/Sub-triggered Cloud Functions (2nd gen) function running
# as a dedicated service account, with an event trigger that retries.
resources:
# Runtime identity the function executes as.
account:
type: gcp:serviceaccount:Account
properties:
accountId: gcf-sa
displayName: Test Service Account
# Topic whose messages invoke the function.
topic:
type: gcp:pubsub:Topic
properties:
name: functions2-topic
bucket:
type: gcp:storage:Bucket
properties:
name: ${project}-gcf-source
location: US
uniformBucketLevelAccess: true
object:
type: gcp:storage:BucketObject
properties:
name: function-source.zip
bucket: ${bucket.name}
source:
fn::FileAsset: function-source.zip
function:
type: gcp:cloudfunctionsv2:Function
properties:
name: gcf-function
location: us-central1
description: a new function
buildConfig:
runtime: nodejs20
entryPoint: helloPubSub
# Visible to the build step only, not at runtime.
environmentVariables:
BUILD_CONFIG_TEST: build_test
source:
storageSource:
bucket: ${bucket.name}
object: ${object.name}
serviceConfig:
maxInstanceCount: 3
minInstanceCount: 1 # keep one instance warm to reduce cold starts
availableMemory: 4Gi
timeoutSeconds: 60
maxInstanceRequestConcurrency: 80
availableCpu: '4'
environmentVariables:
SERVICE_CONFIG_TEST: config_test
SERVICE_CONFIG_DIFF_TEST: ${account.email}
ingressSettings: ALLOW_INTERNAL_ONLY # block traffic from outside the project/VPC
allTrafficOnLatestRevision: true
serviceAccountEmail: ${account.email}
eventTrigger:
triggerRegion: us-central1
eventType: google.cloud.pubsub.topic.v1.messagePublished
pubsubTopic: ${topic.id}
retryPolicy: RETRY_POLICY_RETRY # redeliver events on failure
variables:
project: my-project-name
When a message arrives on the Pub/Sub topic, Cloud Functions invokes your function with the message payload. The eventTrigger block defines the event source: eventType specifies the Pub/Sub message event, pubsubTopic references the topic ID, and retryPolicy controls failure handling. The serviceAccountEmail grants the function permissions to access other Google Cloud services. Environment variables in both buildConfig and serviceConfig pass configuration to build-time and runtime environments.
Inject secrets as environment variables
Applications need credentials without hardcoding them in source code. Secret Manager integration provides secure access at runtime.
// TypeScript: expose a Secret Manager secret to the function as an
// environment variable (key TEST, resolved at instance start).
import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";
const project = "my-project-name";
const bucket = new gcp.storage.Bucket("bucket", {
name: `${project}-gcf-source`,
location: "US",
uniformBucketLevelAccess: true,
});
const object = new gcp.storage.BucketObject("object", {
name: "function-source.zip",
bucket: bucket.name,
source: new pulumi.asset.FileAsset("function-source.zip"),
});
// Secret replicated only in the function's region.
const secret = new gcp.secretmanager.Secret("secret", {
secretId: "secret",
replication: {
userManaged: {
replicas: [{
location: "us-central1",
}],
},
},
});
// The actual secret payload; the function reads version "latest".
const secretSecretVersion = new gcp.secretmanager.SecretVersion("secret", {
secret: secret.name,
secretData: "secret",
enabled: true,
});
const _function = new gcp.cloudfunctionsv2.Function("function", {
name: "function-secret",
location: "us-central1",
description: "a new function",
buildConfig: {
runtime: "nodejs20",
entryPoint: "helloHttp",
source: {
storageSource: {
bucket: bucket.name,
object: object.name,
},
},
},
serviceConfig: {
maxInstanceCount: 1,
availableMemory: "256M",
timeoutSeconds: 60,
secretEnvironmentVariables: [{
key: "TEST", // env var name seen by the function code
projectId: project,
secret: secret.secretId,
version: "latest",
}],
},
}, {
// Ensure a version exists before the function tries to resolve it.
dependsOn: [secretSecretVersion],
});
# Python: expose a Secret Manager secret to the function as an
# environment variable (key TEST, resolved at instance start).
import pulumi
import pulumi_gcp as gcp
project = "my-project-name"
bucket = gcp.storage.Bucket("bucket",
name=f"{project}-gcf-source",
location="US",
uniform_bucket_level_access=True)
object = gcp.storage.BucketObject("object",
name="function-source.zip",
bucket=bucket.name,
source=pulumi.FileAsset("function-source.zip"))
# Secret replicated only in the function's region.
secret = gcp.secretmanager.Secret("secret",
secret_id="secret",
replication={
"user_managed": {
"replicas": [{
"location": "us-central1",
}],
},
})
# The actual secret payload; the function reads version "latest".
secret_secret_version = gcp.secretmanager.SecretVersion("secret",
secret=secret.name,
secret_data="secret",
enabled=True)
function = gcp.cloudfunctionsv2.Function("function",
name="function-secret",
location="us-central1",
description="a new function",
build_config={
"runtime": "nodejs20",
"entry_point": "helloHttp",
"source": {
"storage_source": {
"bucket": bucket.name,
"object": object.name,
},
},
},
service_config={
"max_instance_count": 1,
"available_memory": "256M",
"timeout_seconds": 60,
"secret_environment_variables": [{
"key": "TEST",  # env var name seen by the function code
"project_id": project,
"secret": secret.secret_id,
"version": "latest",
}],
},
# Ensure a version exists before the function tries to resolve it.
opts = pulumi.ResourceOptions(depends_on=[secret_secret_version]))
// Go: expose a Secret Manager secret to the function as an
// environment variable (key TEST, resolved at instance start).
package main
import (
"github.com/pulumi/pulumi-gcp/sdk/v9/go/gcp/cloudfunctionsv2"
"github.com/pulumi/pulumi-gcp/sdk/v9/go/gcp/secretmanager"
"github.com/pulumi/pulumi-gcp/sdk/v9/go/gcp/storage"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
project := "my-project-name"
bucket, err := storage.NewBucket(ctx, "bucket", &storage.BucketArgs{
Name: pulumi.Sprintf("%v-gcf-source", project),
Location: pulumi.String("US"),
UniformBucketLevelAccess: pulumi.Bool(true),
})
if err != nil {
return err
}
object, err := storage.NewBucketObject(ctx, "object", &storage.BucketObjectArgs{
Name: pulumi.String("function-source.zip"),
Bucket: bucket.Name,
Source: pulumi.NewFileAsset("function-source.zip"),
})
if err != nil {
return err
}
// Secret replicated only in the function's region.
secret, err := secretmanager.NewSecret(ctx, "secret", &secretmanager.SecretArgs{
SecretId: pulumi.String("secret"),
Replication: &secretmanager.SecretReplicationArgs{
UserManaged: &secretmanager.SecretReplicationUserManagedArgs{
Replicas: secretmanager.SecretReplicationUserManagedReplicaArray{
&secretmanager.SecretReplicationUserManagedReplicaArgs{
Location: pulumi.String("us-central1"),
},
},
},
},
})
if err != nil {
return err
}
// The actual secret payload; the function reads version "latest".
secretSecretVersion, err := secretmanager.NewSecretVersion(ctx, "secret", &secretmanager.SecretVersionArgs{
Secret: secret.Name,
SecretData: pulumi.String("secret"),
Enabled: pulumi.Bool(true),
})
if err != nil {
return err
}
_, err = cloudfunctionsv2.NewFunction(ctx, "function", &cloudfunctionsv2.FunctionArgs{
Name: pulumi.String("function-secret"),
Location: pulumi.String("us-central1"),
Description: pulumi.String("a new function"),
BuildConfig: &cloudfunctionsv2.FunctionBuildConfigArgs{
Runtime: pulumi.String("nodejs20"),
EntryPoint: pulumi.String("helloHttp"),
Source: &cloudfunctionsv2.FunctionBuildConfigSourceArgs{
StorageSource: &cloudfunctionsv2.FunctionBuildConfigSourceStorageSourceArgs{
Bucket: bucket.Name,
Object: object.Name,
},
},
},
ServiceConfig: &cloudfunctionsv2.FunctionServiceConfigArgs{
MaxInstanceCount: pulumi.Int(1),
AvailableMemory: pulumi.String("256M"),
TimeoutSeconds: pulumi.Int(60),
SecretEnvironmentVariables: cloudfunctionsv2.FunctionServiceConfigSecretEnvironmentVariableArray{
&cloudfunctionsv2.FunctionServiceConfigSecretEnvironmentVariableArgs{
Key: pulumi.String("TEST"), // env var name seen by the function code
ProjectId: pulumi.String(project),
Secret: secret.SecretId,
Version: pulumi.String("latest"),
},
},
},
// Ensure a version exists before the function tries to resolve it.
}, pulumi.DependsOn([]pulumi.Resource{
secretSecretVersion,
}))
if err != nil {
return err
}
return nil
})
}
// C#: expose a Secret Manager secret to the function as an
// environment variable (key TEST, resolved at instance start).
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;
return await Deployment.RunAsync(() =>
{
var project = "my-project-name";
var bucket = new Gcp.Storage.Bucket("bucket", new()
{
Name = $"{project}-gcf-source",
Location = "US",
UniformBucketLevelAccess = true,
});
var @object = new Gcp.Storage.BucketObject("object", new()
{
Name = "function-source.zip",
Bucket = bucket.Name,
Source = new FileAsset("function-source.zip"),
});
// Secret replicated only in the function's region.
var secret = new Gcp.SecretManager.Secret("secret", new()
{
SecretId = "secret",
Replication = new Gcp.SecretManager.Inputs.SecretReplicationArgs
{
UserManaged = new Gcp.SecretManager.Inputs.SecretReplicationUserManagedArgs
{
Replicas = new[]
{
new Gcp.SecretManager.Inputs.SecretReplicationUserManagedReplicaArgs
{
Location = "us-central1",
},
},
},
},
});
// The actual secret payload; the function reads version "latest".
var secretSecretVersion = new Gcp.SecretManager.SecretVersion("secret", new()
{
Secret = secret.Name,
SecretData = "secret",
Enabled = true,
});
var function = new Gcp.CloudFunctionsV2.Function("function", new()
{
Name = "function-secret",
Location = "us-central1",
Description = "a new function",
BuildConfig = new Gcp.CloudFunctionsV2.Inputs.FunctionBuildConfigArgs
{
Runtime = "nodejs20",
EntryPoint = "helloHttp",
Source = new Gcp.CloudFunctionsV2.Inputs.FunctionBuildConfigSourceArgs
{
StorageSource = new Gcp.CloudFunctionsV2.Inputs.FunctionBuildConfigSourceStorageSourceArgs
{
Bucket = bucket.Name,
Object = @object.Name,
},
},
},
ServiceConfig = new Gcp.CloudFunctionsV2.Inputs.FunctionServiceConfigArgs
{
MaxInstanceCount = 1,
AvailableMemory = "256M",
TimeoutSeconds = 60,
SecretEnvironmentVariables = new[]
{
new Gcp.CloudFunctionsV2.Inputs.FunctionServiceConfigSecretEnvironmentVariableArgs
{
Key = "TEST", // env var name seen by the function code
ProjectId = project,
Secret = secret.SecretId,
Version = "latest",
},
},
},
// Ensure a version exists before the function tries to resolve it.
}, new CustomResourceOptions
{
DependsOn =
{
secretSecretVersion,
},
});
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.storage.Bucket;
import com.pulumi.gcp.storage.BucketArgs;
import com.pulumi.gcp.storage.BucketObject;
import com.pulumi.gcp.storage.BucketObjectArgs;
import com.pulumi.gcp.secretmanager.Secret;
import com.pulumi.gcp.secretmanager.SecretArgs;
import com.pulumi.gcp.secretmanager.inputs.SecretReplicationArgs;
import com.pulumi.gcp.secretmanager.inputs.SecretReplicationUserManagedArgs;
import com.pulumi.gcp.secretmanager.inputs.SecretReplicationUserManagedReplicaArgs;
import com.pulumi.gcp.secretmanager.SecretVersion;
import com.pulumi.gcp.secretmanager.SecretVersionArgs;
import com.pulumi.gcp.cloudfunctionsv2.Function;
import com.pulumi.gcp.cloudfunctionsv2.FunctionArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionBuildConfigArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionBuildConfigSourceArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionBuildConfigSourceStorageSourceArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionServiceConfigArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionServiceConfigSecretEnvironmentVariableArgs;
import com.pulumi.asset.FileAsset;
import com.pulumi.resources.CustomResourceOptions;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
// Java: expose a Secret Manager secret to the function as an
// environment variable (key TEST, resolved at instance start).
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
final var project = "my-project-name";
var bucket = new Bucket("bucket", BucketArgs.builder()
.name(String.format("%s-gcf-source", project))
.location("US")
.uniformBucketLevelAccess(true)
.build());
var object = new BucketObject("object", BucketObjectArgs.builder()
.name("function-source.zip")
.bucket(bucket.name())
.source(new FileAsset("function-source.zip"))
.build());
// Secret replicated only in the function's region.
var secret = new Secret("secret", SecretArgs.builder()
.secretId("secret")
.replication(SecretReplicationArgs.builder()
.userManaged(SecretReplicationUserManagedArgs.builder()
.replicas(SecretReplicationUserManagedReplicaArgs.builder()
.location("us-central1")
.build())
.build())
.build())
.build());
// The actual secret payload; the function reads version "latest".
var secretSecretVersion = new SecretVersion("secretSecretVersion", SecretVersionArgs.builder()
.secret(secret.name())
.secretData("secret")
.enabled(true)
.build());
var function = new Function("function", FunctionArgs.builder()
.name("function-secret")
.location("us-central1")
.description("a new function")
.buildConfig(FunctionBuildConfigArgs.builder()
.runtime("nodejs20")
.entryPoint("helloHttp")
.source(FunctionBuildConfigSourceArgs.builder()
.storageSource(FunctionBuildConfigSourceStorageSourceArgs.builder()
.bucket(bucket.name())
.object(object.name())
.build())
.build())
.build())
.serviceConfig(FunctionServiceConfigArgs.builder()
.maxInstanceCount(1)
.availableMemory("256M")
.timeoutSeconds(60)
.secretEnvironmentVariables(FunctionServiceConfigSecretEnvironmentVariableArgs.builder()
.key("TEST") // env var name seen by the function code
.projectId(project)
.secret(secret.secretId())
.version("latest")
.build())
.build())
// Ensure a version exists before the function tries to resolve it.
.build(), CustomResourceOptions.builder()
.dependsOn(secretSecretVersion)
.build());
}
}
# YAML: expose a Secret Manager secret to the function as an
# environment variable (key TEST, resolved at instance start).
resources:
bucket:
type: gcp:storage:Bucket
properties:
name: ${project}-gcf-source
location: US
uniformBucketLevelAccess: true
object:
type: gcp:storage:BucketObject
properties:
name: function-source.zip
bucket: ${bucket.name}
source:
fn::FileAsset: function-source.zip
function:
type: gcp:cloudfunctionsv2:Function
properties:
name: function-secret
location: us-central1
description: a new function
buildConfig:
runtime: nodejs20
entryPoint: helloHttp
source:
storageSource:
bucket: ${bucket.name}
object: ${object.name}
serviceConfig:
maxInstanceCount: 1
availableMemory: 256M
timeoutSeconds: 60
secretEnvironmentVariables:
- key: TEST # env var name seen by the function code
projectId: ${project}
secret: ${secret.secretId}
version: latest
options:
# Ensure a version exists before the function tries to resolve it.
dependsOn:
- ${secretSecretVersion}
# Secret replicated only in the function's region.
secret:
type: gcp:secretmanager:Secret
properties:
secretId: secret
replication:
userManaged:
replicas:
- location: us-central1
# The actual secret payload; the function reads version "latest".
secretSecretVersion:
type: gcp:secretmanager:SecretVersion
name: secret
properties:
secret: ${secret.name}
secretData: secret
enabled: true
variables:
project: my-project-name
The secretEnvironmentVariables array maps Secret Manager secrets to environment variables visible to your function code. Each entry specifies the environment variable key, the project that owns the secret, the secret name, and the version to use (or "latest" for automatic updates). The function declares an explicit dependency on the secret version resource to ensure the secret data exists before deployment.
Mount secrets as files in the filesystem
Some applications expect credentials in files rather than environment variables.
// TypeScript: mount a Secret Manager secret into the function's filesystem
// under /etc/secrets instead of exposing it as an environment variable.
import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";
const project = "my-project-name";
const bucket = new gcp.storage.Bucket("bucket", {
name: `${project}-gcf-source`,
location: "US",
uniformBucketLevelAccess: true,
});
const object = new gcp.storage.BucketObject("object", {
name: "function-source.zip",
bucket: bucket.name,
source: new pulumi.asset.FileAsset("function-source.zip"),
});
// Secret replicated only in the function's region.
const secret = new gcp.secretmanager.Secret("secret", {
secretId: "secret",
replication: {
userManaged: {
replicas: [{
location: "us-central1",
}],
},
},
});
// The actual secret payload mounted into the volume.
const secretSecretVersion = new gcp.secretmanager.SecretVersion("secret", {
secret: secret.name,
secretData: "secret",
enabled: true,
});
const _function = new gcp.cloudfunctionsv2.Function("function", {
name: "function-secret",
location: "us-central1",
description: "a new function",
buildConfig: {
runtime: "nodejs20",
entryPoint: "helloHttp",
source: {
storageSource: {
bucket: bucket.name,
object: object.name,
},
},
},
serviceConfig: {
maxInstanceCount: 1,
availableMemory: "256M",
timeoutSeconds: 60,
secretVolumes: [{
mountPath: "/etc/secrets", // secret becomes a file under this directory
projectId: project,
secret: secret.secretId,
}],
},
}, {
// Ensure a version exists before the function tries to mount it.
dependsOn: [secretSecretVersion],
});
# Python: mount a Secret Manager secret into the function's filesystem
# at /etc/secrets via service_config.secret_volumes.
import pulumi
import pulumi_gcp as gcp

project = "my-project-name"
# Bucket holding the zipped function source.
bucket = gcp.storage.Bucket("bucket",
    name=f"{project}-gcf-source",
    location="US",
    uniform_bucket_level_access=True)
object = gcp.storage.BucketObject("object",
    name="function-source.zip",
    bucket=bucket.name,
    source=pulumi.FileAsset("function-source.zip"))
secret = gcp.secretmanager.Secret("secret",
    secret_id="secret",
    replication={
        "user_managed": {
            "replicas": [{
                "location": "us-central1",
            }],
        },
    })
# A version must exist before the function can mount the secret.
secret_secret_version = gcp.secretmanager.SecretVersion("secret",
    secret=secret.name,
    secret_data="secret",
    enabled=True)
function = gcp.cloudfunctionsv2.Function("function",
    name="function-secret",
    location="us-central1",
    description="a new function",
    build_config={
        "runtime": "nodejs20",
        "entry_point": "helloHttp",
        "source": {
            "storage_source": {
                "bucket": bucket.name,
                "object": object.name,
            },
        },
    },
    service_config={
        "max_instance_count": 1,
        "available_memory": "256M",
        "timeout_seconds": 60,
        # The secret is surfaced as a file under mount_path.
        "secret_volumes": [{
            "mount_path": "/etc/secrets",
            "project_id": project,
            "secret": secret.secret_id,
        }],
    },
    opts = pulumi.ResourceOptions(depends_on=[secret_secret_version]))
// Go: mount a Secret Manager secret into the function's filesystem
// at /etc/secrets via ServiceConfig.SecretVolumes.
package main

import (
	"github.com/pulumi/pulumi-gcp/sdk/v9/go/gcp/cloudfunctionsv2"
	"github.com/pulumi/pulumi-gcp/sdk/v9/go/gcp/secretmanager"
	"github.com/pulumi/pulumi-gcp/sdk/v9/go/gcp/storage"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		project := "my-project-name"
		// Bucket holding the zipped function source.
		bucket, err := storage.NewBucket(ctx, "bucket", &storage.BucketArgs{
			Name:                     pulumi.Sprintf("%v-gcf-source", project),
			Location:                 pulumi.String("US"),
			UniformBucketLevelAccess: pulumi.Bool(true),
		})
		if err != nil {
			return err
		}
		object, err := storage.NewBucketObject(ctx, "object", &storage.BucketObjectArgs{
			Name:   pulumi.String("function-source.zip"),
			Bucket: bucket.Name,
			Source: pulumi.NewFileAsset("function-source.zip"),
		})
		if err != nil {
			return err
		}
		secret, err := secretmanager.NewSecret(ctx, "secret", &secretmanager.SecretArgs{
			SecretId: pulumi.String("secret"),
			Replication: &secretmanager.SecretReplicationArgs{
				UserManaged: &secretmanager.SecretReplicationUserManagedArgs{
					Replicas: secretmanager.SecretReplicationUserManagedReplicaArray{
						&secretmanager.SecretReplicationUserManagedReplicaArgs{
							Location: pulumi.String("us-central1"),
						},
					},
				},
			},
		})
		if err != nil {
			return err
		}
		// A version must exist before the function can mount the secret.
		secretSecretVersion, err := secretmanager.NewSecretVersion(ctx, "secret", &secretmanager.SecretVersionArgs{
			Secret:     secret.Name,
			SecretData: pulumi.String("secret"),
			Enabled:    pulumi.Bool(true),
		})
		if err != nil {
			return err
		}
		_, err = cloudfunctionsv2.NewFunction(ctx, "function", &cloudfunctionsv2.FunctionArgs{
			Name:        pulumi.String("function-secret"),
			Location:    pulumi.String("us-central1"),
			Description: pulumi.String("a new function"),
			BuildConfig: &cloudfunctionsv2.FunctionBuildConfigArgs{
				Runtime:    pulumi.String("nodejs20"),
				EntryPoint: pulumi.String("helloHttp"),
				Source: &cloudfunctionsv2.FunctionBuildConfigSourceArgs{
					StorageSource: &cloudfunctionsv2.FunctionBuildConfigSourceStorageSourceArgs{
						Bucket: bucket.Name,
						Object: object.Name,
					},
				},
			},
			ServiceConfig: &cloudfunctionsv2.FunctionServiceConfigArgs{
				MaxInstanceCount: pulumi.Int(1),
				AvailableMemory:  pulumi.String("256M"),
				TimeoutSeconds:   pulumi.Int(60),
				// The secret is surfaced as a file under MountPath.
				SecretVolumes: cloudfunctionsv2.FunctionServiceConfigSecretVolumeArray{
					&cloudfunctionsv2.FunctionServiceConfigSecretVolumeArgs{
						MountPath: pulumi.String("/etc/secrets"),
						ProjectId: pulumi.String(project),
						Secret:    secret.SecretId,
					},
				},
			},
		}, pulumi.DependsOn([]pulumi.Resource{
			secretSecretVersion,
		}))
		if err != nil {
			return err
		}
		return nil
	})
}
// C#: mount a Secret Manager secret into the function's filesystem
// at /etc/secrets via ServiceConfig.SecretVolumes.
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;

return await Deployment.RunAsync(() =>
{
    var project = "my-project-name";
    // Bucket holding the zipped function source.
    var bucket = new Gcp.Storage.Bucket("bucket", new()
    {
        Name = $"{project}-gcf-source",
        Location = "US",
        UniformBucketLevelAccess = true,
    });
    var @object = new Gcp.Storage.BucketObject("object", new()
    {
        Name = "function-source.zip",
        Bucket = bucket.Name,
        Source = new FileAsset("function-source.zip"),
    });
    var secret = new Gcp.SecretManager.Secret("secret", new()
    {
        SecretId = "secret",
        Replication = new Gcp.SecretManager.Inputs.SecretReplicationArgs
        {
            UserManaged = new Gcp.SecretManager.Inputs.SecretReplicationUserManagedArgs
            {
                Replicas = new[]
                {
                    new Gcp.SecretManager.Inputs.SecretReplicationUserManagedReplicaArgs
                    {
                        Location = "us-central1",
                    },
                },
            },
        },
    });
    // A version must exist before the function can mount the secret.
    var secretSecretVersion = new Gcp.SecretManager.SecretVersion("secret", new()
    {
        Secret = secret.Name,
        SecretData = "secret",
        Enabled = true,
    });
    var function = new Gcp.CloudFunctionsV2.Function("function", new()
    {
        Name = "function-secret",
        Location = "us-central1",
        Description = "a new function",
        BuildConfig = new Gcp.CloudFunctionsV2.Inputs.FunctionBuildConfigArgs
        {
            Runtime = "nodejs20",
            EntryPoint = "helloHttp",
            Source = new Gcp.CloudFunctionsV2.Inputs.FunctionBuildConfigSourceArgs
            {
                StorageSource = new Gcp.CloudFunctionsV2.Inputs.FunctionBuildConfigSourceStorageSourceArgs
                {
                    Bucket = bucket.Name,
                    Object = @object.Name,
                },
            },
        },
        ServiceConfig = new Gcp.CloudFunctionsV2.Inputs.FunctionServiceConfigArgs
        {
            MaxInstanceCount = 1,
            AvailableMemory = "256M",
            TimeoutSeconds = 60,
            // The secret is surfaced as a file under MountPath.
            SecretVolumes = new[]
            {
                new Gcp.CloudFunctionsV2.Inputs.FunctionServiceConfigSecretVolumeArgs
                {
                    MountPath = "/etc/secrets",
                    ProjectId = project,
                    Secret = secret.SecretId,
                },
            },
        },
    }, new CustomResourceOptions
    {
        DependsOn =
        {
            secretSecretVersion,
        },
    });
});
// Java: mount a Secret Manager secret into the function's filesystem
// at /etc/secrets via serviceConfig.secretVolumes.
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.storage.Bucket;
import com.pulumi.gcp.storage.BucketArgs;
import com.pulumi.gcp.storage.BucketObject;
import com.pulumi.gcp.storage.BucketObjectArgs;
import com.pulumi.gcp.secretmanager.Secret;
import com.pulumi.gcp.secretmanager.SecretArgs;
import com.pulumi.gcp.secretmanager.inputs.SecretReplicationArgs;
import com.pulumi.gcp.secretmanager.inputs.SecretReplicationUserManagedArgs;
// Needed for the replica list inside the user-managed replication policy.
import com.pulumi.gcp.secretmanager.inputs.SecretReplicationUserManagedReplicaArgs;
import com.pulumi.gcp.secretmanager.SecretVersion;
import com.pulumi.gcp.secretmanager.SecretVersionArgs;
import com.pulumi.gcp.cloudfunctionsv2.Function;
import com.pulumi.gcp.cloudfunctionsv2.FunctionArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionBuildConfigArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionBuildConfigSourceArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionBuildConfigSourceStorageSourceArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionServiceConfigArgs;
// Needed for the secretVolumes entry on the service config.
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionServiceConfigSecretVolumeArgs;
import com.pulumi.asset.FileAsset;
import com.pulumi.resources.CustomResourceOptions;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        final var project = "my-project-name";
        // Bucket holding the zipped function source.
        var bucket = new Bucket("bucket", BucketArgs.builder()
            .name(String.format("%s-gcf-source", project))
            .location("US")
            .uniformBucketLevelAccess(true)
            .build());

        var object = new BucketObject("object", BucketObjectArgs.builder()
            .name("function-source.zip")
            .bucket(bucket.name())
            .source(new FileAsset("function-source.zip"))
            .build());

        var secret = new Secret("secret", SecretArgs.builder()
            .secretId("secret")
            .replication(SecretReplicationArgs.builder()
                .userManaged(SecretReplicationUserManagedArgs.builder()
                    .replicas(SecretReplicationUserManagedReplicaArgs.builder()
                        .location("us-central1")
                        .build())
                    .build())
                .build())
            .build());

        // A version must exist before the function can mount the secret.
        var secretSecretVersion = new SecretVersion("secretSecretVersion", SecretVersionArgs.builder()
            .secret(secret.name())
            .secretData("secret")
            .enabled(true)
            .build());

        var function = new Function("function", FunctionArgs.builder()
            .name("function-secret")
            .location("us-central1")
            .description("a new function")
            .buildConfig(FunctionBuildConfigArgs.builder()
                .runtime("nodejs20")
                .entryPoint("helloHttp")
                .source(FunctionBuildConfigSourceArgs.builder()
                    .storageSource(FunctionBuildConfigSourceStorageSourceArgs.builder()
                        .bucket(bucket.name())
                        .object(object.name())
                        .build())
                    .build())
                .build())
            .serviceConfig(FunctionServiceConfigArgs.builder()
                .maxInstanceCount(1)
                .availableMemory("256M")
                .timeoutSeconds(60)
                // The secret is surfaced as a file under mountPath.
                .secretVolumes(FunctionServiceConfigSecretVolumeArgs.builder()
                    .mountPath("/etc/secrets")
                    .projectId(project)
                    .secret(secret.secretId())
                    .build())
                .build())
            .build(), CustomResourceOptions.builder()
                .dependsOn(secretSecretVersion)
                .build());
    }
}
# Pulumi YAML: mount a Secret Manager secret into the function's filesystem
# at /etc/secrets via serviceConfig.secretVolumes.
resources:
  bucket:
    type: gcp:storage:Bucket
    properties:
      name: ${project}-gcf-source
      location: US
      uniformBucketLevelAccess: true
  object:
    type: gcp:storage:BucketObject
    properties:
      name: function-source.zip
      bucket: ${bucket.name}
      source:
        fn::FileAsset: function-source.zip
  function:
    type: gcp:cloudfunctionsv2:Function
    properties:
      name: function-secret
      location: us-central1
      description: a new function
      buildConfig:
        runtime: nodejs20
        entryPoint: helloHttp
        source:
          storageSource:
            bucket: ${bucket.name}
            object: ${object.name}
      serviceConfig:
        maxInstanceCount: 1
        availableMemory: 256M
        timeoutSeconds: 60
        # The secret is surfaced as a file under mountPath.
        secretVolumes:
          - mountPath: /etc/secrets
            projectId: ${project}
            secret: ${secret.secretId}
    options:
      dependsOn:
        # Ensure a secret version exists before the function deploys.
        - ${secretSecretVersion}
  secret:
    type: gcp:secretmanager:Secret
    properties:
      secretId: secret
      replication:
        userManaged:
          replicas:
            - location: us-central1
  secretSecretVersion:
    type: gcp:secretmanager:SecretVersion
    name: secret
    properties:
      secret: ${secret.name}
      secretData: secret
      enabled: true
variables:
  project: my-project-name
The secretVolumes array mounts secrets into the function’s filesystem at the specified mountPath. Your code reads the secret from /etc/secrets/<secret-name> as a file. This approach works well for certificate files, configuration files, or applications that expect file-based credentials.
Respond to Cloud Storage object events
Data pipelines trigger processing when files land in Cloud Storage.
// TypeScript: trigger the function when an object is finalized (uploaded)
// in a Cloud Storage bucket, via an Eventarc event trigger.
import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";

// Bucket holding the zipped function source (distinct from the trigger bucket).
const source_bucket = new gcp.storage.Bucket("source-bucket", {
    name: "gcf-source-bucket",
    location: "US",
    uniformBucketLevelAccess: true,
});
const object = new gcp.storage.BucketObject("object", {
    name: "function-source.zip",
    bucket: source_bucket.name,
    source: new pulumi.asset.FileAsset("function-source.zip"),
});
// Uploads to this bucket fire the event trigger.
const trigger_bucket = new gcp.storage.Bucket("trigger-bucket", {
    name: "gcf-trigger-bucket",
    location: "us-central1",
    uniformBucketLevelAccess: true,
});
const gcsAccount = gcp.storage.getProjectServiceAccount({});
// To use GCS CloudEvent triggers, the GCS service account requires the Pub/Sub Publisher(roles/pubsub.publisher) IAM role in the specified project.
// (See https://cloud.google.com/eventarc/docs/run/quickstart-storage#before-you-begin)
const gcs_pubsub_publishing = new gcp.projects.IAMMember("gcs-pubsub-publishing", {
    project: "my-project-name",
    role: "roles/pubsub.publisher",
    member: gcsAccount.then(gcsAccount => `serviceAccount:${gcsAccount.emailAddress}`),
});
const account = new gcp.serviceaccount.Account("account", {
    accountId: "gcf-sa",
    displayName: "Test Service Account - used for both the cloud function and eventarc trigger in the test",
});
// Permissions on the service account used by the function and Eventarc trigger
const invoking = new gcp.projects.IAMMember("invoking", {
    project: "my-project-name",
    role: "roles/run.invoker",
    member: pulumi.interpolate`serviceAccount:${account.email}`,
}, {
    dependsOn: [gcs_pubsub_publishing],
});
const event_receiving = new gcp.projects.IAMMember("event-receiving", {
    project: "my-project-name",
    role: "roles/eventarc.eventReceiver",
    member: pulumi.interpolate`serviceAccount:${account.email}`,
}, {
    dependsOn: [invoking],
});
const artifactregistry_reader = new gcp.projects.IAMMember("artifactregistry-reader", {
    project: "my-project-name",
    role: "roles/artifactregistry.reader",
    member: pulumi.interpolate`serviceAccount:${account.email}`,
}, {
    dependsOn: [event_receiving],
});
const _function = new gcp.cloudfunctionsv2.Function("function", {
    name: "gcf-function",
    location: "us-central1",
    description: "a new function",
    buildConfig: {
        runtime: "nodejs20",
        entryPoint: "entryPoint",
        environmentVariables: {
            BUILD_CONFIG_TEST: "build_test",
        },
        source: {
            storageSource: {
                bucket: source_bucket.name,
                object: object.name,
            },
        },
    },
    serviceConfig: {
        maxInstanceCount: 3,
        minInstanceCount: 1,
        availableMemory: "256M",
        timeoutSeconds: 60,
        environmentVariables: {
            SERVICE_CONFIG_TEST: "config_test",
        },
        ingressSettings: "ALLOW_INTERNAL_ONLY",
        allTrafficOnLatestRevision: true,
        serviceAccountEmail: account.email,
    },
    eventTrigger: {
        // Fires on successful object uploads; filtered to the trigger bucket.
        eventType: "google.cloud.storage.object.v1.finalized",
        retryPolicy: "RETRY_POLICY_RETRY",
        serviceAccountEmail: account.email,
        eventFilters: [{
            attribute: "bucket",
            value: trigger_bucket.name,
        }],
    },
}, {
    // Wait for IAM bindings to settle before deploying the function.
    dependsOn: [
        event_receiving,
        artifactregistry_reader,
    ],
});
# Python: trigger the function when an object is finalized (uploaded)
# in a Cloud Storage bucket, via an Eventarc event trigger.
import pulumi
import pulumi_gcp as gcp

# Bucket holding the zipped function source (distinct from the trigger bucket).
source_bucket = gcp.storage.Bucket("source-bucket",
    name="gcf-source-bucket",
    location="US",
    uniform_bucket_level_access=True)
object = gcp.storage.BucketObject("object",
    name="function-source.zip",
    bucket=source_bucket.name,
    source=pulumi.FileAsset("function-source.zip"))
# Uploads to this bucket fire the event trigger.
trigger_bucket = gcp.storage.Bucket("trigger-bucket",
    name="gcf-trigger-bucket",
    location="us-central1",
    uniform_bucket_level_access=True)
gcs_account = gcp.storage.get_project_service_account()
# To use GCS CloudEvent triggers, the GCS service account requires the Pub/Sub Publisher(roles/pubsub.publisher) IAM role in the specified project.
# (See https://cloud.google.com/eventarc/docs/run/quickstart-storage#before-you-begin)
gcs_pubsub_publishing = gcp.projects.IAMMember("gcs-pubsub-publishing",
    project="my-project-name",
    role="roles/pubsub.publisher",
    member=f"serviceAccount:{gcs_account.email_address}")
account = gcp.serviceaccount.Account("account",
    account_id="gcf-sa",
    display_name="Test Service Account - used for both the cloud function and eventarc trigger in the test")
# Permissions on the service account used by the function and Eventarc trigger
invoking = gcp.projects.IAMMember("invoking",
    project="my-project-name",
    role="roles/run.invoker",
    member=account.email.apply(lambda email: f"serviceAccount:{email}"),
    opts = pulumi.ResourceOptions(depends_on=[gcs_pubsub_publishing]))
event_receiving = gcp.projects.IAMMember("event-receiving",
    project="my-project-name",
    role="roles/eventarc.eventReceiver",
    member=account.email.apply(lambda email: f"serviceAccount:{email}"),
    opts = pulumi.ResourceOptions(depends_on=[invoking]))
artifactregistry_reader = gcp.projects.IAMMember("artifactregistry-reader",
    project="my-project-name",
    role="roles/artifactregistry.reader",
    member=account.email.apply(lambda email: f"serviceAccount:{email}"),
    opts = pulumi.ResourceOptions(depends_on=[event_receiving]))
function = gcp.cloudfunctionsv2.Function("function",
    name="gcf-function",
    location="us-central1",
    description="a new function",
    build_config={
        "runtime": "nodejs20",
        "entry_point": "entryPoint",
        "environment_variables": {
            "BUILD_CONFIG_TEST": "build_test",
        },
        "source": {
            "storage_source": {
                "bucket": source_bucket.name,
                "object": object.name,
            },
        },
    },
    service_config={
        "max_instance_count": 3,
        "min_instance_count": 1,
        "available_memory": "256M",
        "timeout_seconds": 60,
        "environment_variables": {
            "SERVICE_CONFIG_TEST": "config_test",
        },
        "ingress_settings": "ALLOW_INTERNAL_ONLY",
        "all_traffic_on_latest_revision": True,
        "service_account_email": account.email,
    },
    event_trigger={
        # Fires on successful object uploads; filtered to the trigger bucket.
        "event_type": "google.cloud.storage.object.v1.finalized",
        "retry_policy": "RETRY_POLICY_RETRY",
        "service_account_email": account.email,
        "event_filters": [{
            "attribute": "bucket",
            "value": trigger_bucket.name,
        }],
    },
    # Wait for IAM bindings to settle before deploying the function.
    opts = pulumi.ResourceOptions(depends_on=[
        event_receiving,
        artifactregistry_reader,
    ]))
// Go: trigger the function when an object is finalized (uploaded)
// in a Cloud Storage bucket, via an Eventarc event trigger.
package main

import (
	"fmt"

	"github.com/pulumi/pulumi-gcp/sdk/v9/go/gcp/cloudfunctionsv2"
	"github.com/pulumi/pulumi-gcp/sdk/v9/go/gcp/projects"
	"github.com/pulumi/pulumi-gcp/sdk/v9/go/gcp/serviceaccount"
	"github.com/pulumi/pulumi-gcp/sdk/v9/go/gcp/storage"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		// Bucket holding the zipped function source (distinct from the trigger bucket).
		source_bucket, err := storage.NewBucket(ctx, "source-bucket", &storage.BucketArgs{
			Name:                     pulumi.String("gcf-source-bucket"),
			Location:                 pulumi.String("US"),
			UniformBucketLevelAccess: pulumi.Bool(true),
		})
		if err != nil {
			return err
		}
		object, err := storage.NewBucketObject(ctx, "object", &storage.BucketObjectArgs{
			Name:   pulumi.String("function-source.zip"),
			Bucket: source_bucket.Name,
			Source: pulumi.NewFileAsset("function-source.zip"),
		})
		if err != nil {
			return err
		}
		// Uploads to this bucket fire the event trigger.
		trigger_bucket, err := storage.NewBucket(ctx, "trigger-bucket", &storage.BucketArgs{
			Name:                     pulumi.String("gcf-trigger-bucket"),
			Location:                 pulumi.String("us-central1"),
			UniformBucketLevelAccess: pulumi.Bool(true),
		})
		if err != nil {
			return err
		}
		gcsAccount, err := storage.GetProjectServiceAccount(ctx, &storage.GetProjectServiceAccountArgs{}, nil)
		if err != nil {
			return err
		}
		// To use GCS CloudEvent triggers, the GCS service account requires the Pub/Sub Publisher(roles/pubsub.publisher) IAM role in the specified project.
		// (See https://cloud.google.com/eventarc/docs/run/quickstart-storage#before-you-begin)
		gcs_pubsub_publishing, err := projects.NewIAMMember(ctx, "gcs-pubsub-publishing", &projects.IAMMemberArgs{
			Project: pulumi.String("my-project-name"),
			Role:    pulumi.String("roles/pubsub.publisher"),
			Member:  pulumi.Sprintf("serviceAccount:%v", gcsAccount.EmailAddress),
		})
		if err != nil {
			return err
		}
		account, err := serviceaccount.NewAccount(ctx, "account", &serviceaccount.AccountArgs{
			AccountId:   pulumi.String("gcf-sa"),
			DisplayName: pulumi.String("Test Service Account - used for both the cloud function and eventarc trigger in the test"),
		})
		if err != nil {
			return err
		}
		// Permissions on the service account used by the function and Eventarc trigger
		invoking, err := projects.NewIAMMember(ctx, "invoking", &projects.IAMMemberArgs{
			Project: pulumi.String("my-project-name"),
			Role:    pulumi.String("roles/run.invoker"),
			Member: account.Email.ApplyT(func(email string) (string, error) {
				return fmt.Sprintf("serviceAccount:%v", email), nil
			}).(pulumi.StringOutput),
		}, pulumi.DependsOn([]pulumi.Resource{
			gcs_pubsub_publishing,
		}))
		if err != nil {
			return err
		}
		event_receiving, err := projects.NewIAMMember(ctx, "event-receiving", &projects.IAMMemberArgs{
			Project: pulumi.String("my-project-name"),
			Role:    pulumi.String("roles/eventarc.eventReceiver"),
			Member: account.Email.ApplyT(func(email string) (string, error) {
				return fmt.Sprintf("serviceAccount:%v", email), nil
			}).(pulumi.StringOutput),
		}, pulumi.DependsOn([]pulumi.Resource{
			invoking,
		}))
		if err != nil {
			return err
		}
		artifactregistry_reader, err := projects.NewIAMMember(ctx, "artifactregistry-reader", &projects.IAMMemberArgs{
			Project: pulumi.String("my-project-name"),
			Role:    pulumi.String("roles/artifactregistry.reader"),
			Member: account.Email.ApplyT(func(email string) (string, error) {
				return fmt.Sprintf("serviceAccount:%v", email), nil
			}).(pulumi.StringOutput),
		}, pulumi.DependsOn([]pulumi.Resource{
			event_receiving,
		}))
		if err != nil {
			return err
		}
		_, err = cloudfunctionsv2.NewFunction(ctx, "function", &cloudfunctionsv2.FunctionArgs{
			Name:        pulumi.String("gcf-function"),
			Location:    pulumi.String("us-central1"),
			Description: pulumi.String("a new function"),
			BuildConfig: &cloudfunctionsv2.FunctionBuildConfigArgs{
				Runtime:    pulumi.String("nodejs20"),
				EntryPoint: pulumi.String("entryPoint"),
				EnvironmentVariables: pulumi.StringMap{
					"BUILD_CONFIG_TEST": pulumi.String("build_test"),
				},
				Source: &cloudfunctionsv2.FunctionBuildConfigSourceArgs{
					StorageSource: &cloudfunctionsv2.FunctionBuildConfigSourceStorageSourceArgs{
						Bucket: source_bucket.Name,
						Object: object.Name,
					},
				},
			},
			ServiceConfig: &cloudfunctionsv2.FunctionServiceConfigArgs{
				MaxInstanceCount: pulumi.Int(3),
				MinInstanceCount: pulumi.Int(1),
				AvailableMemory:  pulumi.String("256M"),
				TimeoutSeconds:   pulumi.Int(60),
				EnvironmentVariables: pulumi.StringMap{
					"SERVICE_CONFIG_TEST": pulumi.String("config_test"),
				},
				IngressSettings:            pulumi.String("ALLOW_INTERNAL_ONLY"),
				AllTrafficOnLatestRevision: pulumi.Bool(true),
				ServiceAccountEmail:        account.Email,
			},
			EventTrigger: &cloudfunctionsv2.FunctionEventTriggerArgs{
				// Fires on successful object uploads; filtered to the trigger bucket.
				EventType:           pulumi.String("google.cloud.storage.object.v1.finalized"),
				RetryPolicy:         pulumi.String("RETRY_POLICY_RETRY"),
				ServiceAccountEmail: account.Email,
				EventFilters: cloudfunctionsv2.FunctionEventTriggerEventFilterArray{
					&cloudfunctionsv2.FunctionEventTriggerEventFilterArgs{
						Attribute: pulumi.String("bucket"),
						Value:     trigger_bucket.Name,
					},
				},
			},
		}, pulumi.DependsOn([]pulumi.Resource{
			// Wait for IAM bindings to settle before deploying the function.
			event_receiving,
			artifactregistry_reader,
		}))
		if err != nil {
			return err
		}
		return nil
	})
}
// C#: trigger the function when an object is finalized (uploaded)
// in a Cloud Storage bucket, via an Eventarc event trigger.
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;

return await Deployment.RunAsync(() =>
{
    // Bucket holding the zipped function source (distinct from the trigger bucket).
    var source_bucket = new Gcp.Storage.Bucket("source-bucket", new()
    {
        Name = "gcf-source-bucket",
        Location = "US",
        UniformBucketLevelAccess = true,
    });
    var @object = new Gcp.Storage.BucketObject("object", new()
    {
        Name = "function-source.zip",
        Bucket = source_bucket.Name,
        Source = new FileAsset("function-source.zip"),
    });
    // Uploads to this bucket fire the event trigger.
    var trigger_bucket = new Gcp.Storage.Bucket("trigger-bucket", new()
    {
        Name = "gcf-trigger-bucket",
        Location = "us-central1",
        UniformBucketLevelAccess = true,
    });
    var gcsAccount = Gcp.Storage.GetProjectServiceAccount.Invoke();
    // To use GCS CloudEvent triggers, the GCS service account requires the Pub/Sub Publisher(roles/pubsub.publisher) IAM role in the specified project.
    // (See https://cloud.google.com/eventarc/docs/run/quickstart-storage#before-you-begin)
    var gcs_pubsub_publishing = new Gcp.Projects.IAMMember("gcs-pubsub-publishing", new()
    {
        Project = "my-project-name",
        Role = "roles/pubsub.publisher",
        Member = $"serviceAccount:{gcsAccount.Apply(getProjectServiceAccountResult => getProjectServiceAccountResult.EmailAddress)}",
    });
    var account = new Gcp.ServiceAccount.Account("account", new()
    {
        AccountId = "gcf-sa",
        DisplayName = "Test Service Account - used for both the cloud function and eventarc trigger in the test",
    });
    // Permissions on the service account used by the function and Eventarc trigger
    var invoking = new Gcp.Projects.IAMMember("invoking", new()
    {
        Project = "my-project-name",
        Role = "roles/run.invoker",
        Member = account.Email.Apply(email => $"serviceAccount:{email}"),
    }, new CustomResourceOptions
    {
        DependsOn =
        {
            gcs_pubsub_publishing,
        },
    });
    var event_receiving = new Gcp.Projects.IAMMember("event-receiving", new()
    {
        Project = "my-project-name",
        Role = "roles/eventarc.eventReceiver",
        Member = account.Email.Apply(email => $"serviceAccount:{email}"),
    }, new CustomResourceOptions
    {
        DependsOn =
        {
            invoking,
        },
    });
    var artifactregistry_reader = new Gcp.Projects.IAMMember("artifactregistry-reader", new()
    {
        Project = "my-project-name",
        Role = "roles/artifactregistry.reader",
        Member = account.Email.Apply(email => $"serviceAccount:{email}"),
    }, new CustomResourceOptions
    {
        DependsOn =
        {
            event_receiving,
        },
    });
    var function = new Gcp.CloudFunctionsV2.Function("function", new()
    {
        Name = "gcf-function",
        Location = "us-central1",
        Description = "a new function",
        BuildConfig = new Gcp.CloudFunctionsV2.Inputs.FunctionBuildConfigArgs
        {
            Runtime = "nodejs20",
            EntryPoint = "entryPoint",
            EnvironmentVariables =
            {
                { "BUILD_CONFIG_TEST", "build_test" },
            },
            Source = new Gcp.CloudFunctionsV2.Inputs.FunctionBuildConfigSourceArgs
            {
                StorageSource = new Gcp.CloudFunctionsV2.Inputs.FunctionBuildConfigSourceStorageSourceArgs
                {
                    Bucket = source_bucket.Name,
                    Object = @object.Name,
                },
            },
        },
        ServiceConfig = new Gcp.CloudFunctionsV2.Inputs.FunctionServiceConfigArgs
        {
            MaxInstanceCount = 3,
            MinInstanceCount = 1,
            AvailableMemory = "256M",
            TimeoutSeconds = 60,
            EnvironmentVariables =
            {
                { "SERVICE_CONFIG_TEST", "config_test" },
            },
            IngressSettings = "ALLOW_INTERNAL_ONLY",
            AllTrafficOnLatestRevision = true,
            ServiceAccountEmail = account.Email,
        },
        EventTrigger = new Gcp.CloudFunctionsV2.Inputs.FunctionEventTriggerArgs
        {
            // Fires on successful object uploads; filtered to the trigger bucket.
            EventType = "google.cloud.storage.object.v1.finalized",
            RetryPolicy = "RETRY_POLICY_RETRY",
            ServiceAccountEmail = account.Email,
            EventFilters = new[]
            {
                new Gcp.CloudFunctionsV2.Inputs.FunctionEventTriggerEventFilterArgs
                {
                    Attribute = "bucket",
                    Value = trigger_bucket.Name,
                },
            },
        },
    }, new CustomResourceOptions
    {
        // Wait for IAM bindings to settle before deploying the function.
        DependsOn =
        {
            event_receiving,
            artifactregistry_reader,
        },
    });
});
// Java: trigger the function when an object is finalized (uploaded)
// in a Cloud Storage bucket, via an Eventarc event trigger.
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.storage.Bucket;
import com.pulumi.gcp.storage.BucketArgs;
import com.pulumi.gcp.storage.BucketObject;
import com.pulumi.gcp.storage.BucketObjectArgs;
import com.pulumi.gcp.storage.StorageFunctions;
import com.pulumi.gcp.storage.inputs.GetProjectServiceAccountArgs;
import com.pulumi.gcp.projects.IAMMember;
import com.pulumi.gcp.projects.IAMMemberArgs;
import com.pulumi.gcp.serviceaccount.Account;
import com.pulumi.gcp.serviceaccount.AccountArgs;
import com.pulumi.gcp.cloudfunctionsv2.Function;
import com.pulumi.gcp.cloudfunctionsv2.FunctionArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionBuildConfigArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionBuildConfigSourceArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionBuildConfigSourceStorageSourceArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionServiceConfigArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionEventTriggerArgs;
// Needed for the eventFilters entry on the event trigger.
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionEventTriggerEventFilterArgs;
import com.pulumi.asset.FileAsset;
import com.pulumi.resources.CustomResourceOptions;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        // Bucket holding the zipped function source (distinct from the trigger bucket).
        var source_bucket = new Bucket("source-bucket", BucketArgs.builder()
            .name("gcf-source-bucket")
            .location("US")
            .uniformBucketLevelAccess(true)
            .build());

        var object = new BucketObject("object", BucketObjectArgs.builder()
            .name("function-source.zip")
            .bucket(source_bucket.name())
            .source(new FileAsset("function-source.zip"))
            .build());

        // Uploads to this bucket fire the event trigger.
        var trigger_bucket = new Bucket("trigger-bucket", BucketArgs.builder()
            .name("gcf-trigger-bucket")
            .location("us-central1")
            .uniformBucketLevelAccess(true)
            .build());

        final var gcsAccount = StorageFunctions.getProjectServiceAccount(GetProjectServiceAccountArgs.builder()
            .build());

        // To use GCS CloudEvent triggers, the GCS service account requires the Pub/Sub Publisher(roles/pubsub.publisher) IAM role in the specified project.
        // (See https://cloud.google.com/eventarc/docs/run/quickstart-storage#before-you-begin)
        var gcs_pubsub_publishing = new IAMMember("gcs-pubsub-publishing", IAMMemberArgs.builder()
            .project("my-project-name")
            .role("roles/pubsub.publisher")
            .member(String.format("serviceAccount:%s", gcsAccount.emailAddress()))
            .build());

        var account = new Account("account", AccountArgs.builder()
            .accountId("gcf-sa")
            .displayName("Test Service Account - used for both the cloud function and eventarc trigger in the test")
            .build());

        // Permissions on the service account used by the function and Eventarc trigger
        var invoking = new IAMMember("invoking", IAMMemberArgs.builder()
            .project("my-project-name")
            .role("roles/run.invoker")
            .member(account.email().applyValue(_email -> String.format("serviceAccount:%s", _email)))
            .build(), CustomResourceOptions.builder()
                .dependsOn(gcs_pubsub_publishing)
                .build());

        var event_receiving = new IAMMember("event-receiving", IAMMemberArgs.builder()
            .project("my-project-name")
            .role("roles/eventarc.eventReceiver")
            .member(account.email().applyValue(_email -> String.format("serviceAccount:%s", _email)))
            .build(), CustomResourceOptions.builder()
                .dependsOn(invoking)
                .build());

        var artifactregistry_reader = new IAMMember("artifactregistry-reader", IAMMemberArgs.builder()
            .project("my-project-name")
            .role("roles/artifactregistry.reader")
            .member(account.email().applyValue(_email -> String.format("serviceAccount:%s", _email)))
            .build(), CustomResourceOptions.builder()
                .dependsOn(event_receiving)
                .build());

        var function = new Function("function", FunctionArgs.builder()
            .name("gcf-function")
            .location("us-central1")
            .description("a new function")
            .buildConfig(FunctionBuildConfigArgs.builder()
                .runtime("nodejs20")
                .entryPoint("entryPoint")
                .environmentVariables(Map.of("BUILD_CONFIG_TEST", "build_test"))
                .source(FunctionBuildConfigSourceArgs.builder()
                    .storageSource(FunctionBuildConfigSourceStorageSourceArgs.builder()
                        .bucket(source_bucket.name())
                        .object(object.name())
                        .build())
                    .build())
                .build())
            .serviceConfig(FunctionServiceConfigArgs.builder()
                .maxInstanceCount(3)
                .minInstanceCount(1)
                .availableMemory("256M")
                .timeoutSeconds(60)
                .environmentVariables(Map.of("SERVICE_CONFIG_TEST", "config_test"))
                .ingressSettings("ALLOW_INTERNAL_ONLY")
                .allTrafficOnLatestRevision(true)
                .serviceAccountEmail(account.email())
                .build())
            .eventTrigger(FunctionEventTriggerArgs.builder()
                // Fires on successful object uploads; filtered to the trigger bucket.
                .eventType("google.cloud.storage.object.v1.finalized")
                .retryPolicy("RETRY_POLICY_RETRY")
                .serviceAccountEmail(account.email())
                .eventFilters(FunctionEventTriggerEventFilterArgs.builder()
                    .attribute("bucket")
                    .value(trigger_bucket.name())
                    .build())
                .build())
            .build(), CustomResourceOptions.builder()
                // Wait for IAM bindings to settle before deploying the function.
                .dependsOn(
                    event_receiving,
                    artifactregistry_reader)
                .build());
    }
}
# Pulumi YAML: trigger the function when an object is finalized (uploaded)
# in a Cloud Storage bucket, via an Eventarc event trigger.
resources:
  # Bucket holding the zipped function source (distinct from the trigger bucket).
  source-bucket:
    type: gcp:storage:Bucket
    properties:
      name: gcf-source-bucket
      location: US
      uniformBucketLevelAccess: true
  object:
    type: gcp:storage:BucketObject
    properties:
      name: function-source.zip
      bucket: ${["source-bucket"].name}
      source:
        fn::FileAsset: function-source.zip
  # Uploads to this bucket fire the event trigger.
  trigger-bucket:
    type: gcp:storage:Bucket
    properties:
      name: gcf-trigger-bucket
      location: us-central1
      uniformBucketLevelAccess: true
  # To use GCS CloudEvent triggers, the GCS service account requires the Pub/Sub Publisher(roles/pubsub.publisher) IAM role in the specified project.
  # (See https://cloud.google.com/eventarc/docs/run/quickstart-storage#before-you-begin)
  gcs-pubsub-publishing:
    type: gcp:projects:IAMMember
    properties:
      project: my-project-name
      role: roles/pubsub.publisher
      member: serviceAccount:${gcsAccount.emailAddress}
  account:
    type: gcp:serviceaccount:Account
    properties:
      accountId: gcf-sa
      displayName: Test Service Account - used for both the cloud function and eventarc trigger in the test
  # Permissions on the service account used by the function and Eventarc trigger
  invoking:
    type: gcp:projects:IAMMember
    properties:
      project: my-project-name
      role: roles/run.invoker
      member: serviceAccount:${account.email}
    options:
      dependsOn:
        - ${["gcs-pubsub-publishing"]}
  event-receiving:
    type: gcp:projects:IAMMember
    properties:
      project: my-project-name
      role: roles/eventarc.eventReceiver
      member: serviceAccount:${account.email}
    options:
      dependsOn:
        - ${invoking}
  artifactregistry-reader:
    type: gcp:projects:IAMMember
    properties:
      project: my-project-name
      role: roles/artifactregistry.reader
      member: serviceAccount:${account.email}
    options:
      dependsOn:
        - ${["event-receiving"]}
  function:
    type: gcp:cloudfunctionsv2:Function
    properties:
      name: gcf-function
      location: us-central1
      description: a new function
      buildConfig:
        runtime: nodejs20
        entryPoint: entryPoint
        environmentVariables:
          BUILD_CONFIG_TEST: build_test
        source:
          storageSource:
            bucket: ${["source-bucket"].name}
            object: ${object.name}
      serviceConfig:
        maxInstanceCount: 3
        minInstanceCount: 1
        availableMemory: 256M
        timeoutSeconds: 60
        environmentVariables:
          SERVICE_CONFIG_TEST: config_test
        ingressSettings: ALLOW_INTERNAL_ONLY
        allTrafficOnLatestRevision: true
        serviceAccountEmail: ${account.email}
      eventTrigger:
        # Fires on successful object uploads; filtered to the trigger bucket.
        eventType: google.cloud.storage.object.v1.finalized
        retryPolicy: RETRY_POLICY_RETRY
        serviceAccountEmail: ${account.email}
        eventFilters:
          - attribute: bucket
            value: ${["trigger-bucket"].name}
    options:
      # Wait for IAM bindings to settle before deploying the function.
      dependsOn:
        - ${["event-receiving"]}
        - ${["artifactregistry-reader"]}
variables:
  gcsAccount:
    fn::invoke:
      function: gcp:storage:getProjectServiceAccount
      arguments: {}
When an object is created in the trigger bucket, Cloud Functions invokes your function with event metadata. The eventType google.cloud.storage.object.v1.finalized fires on successful uploads. The eventFilters array narrows which events trigger the function (here, filtering by bucket name). The GCS service account requires the Pub/Sub Publisher role, and the function’s service account needs Eventarc and Artifact Registry permissions. The example includes explicit IAM bindings and dependency management to ensure permissions stabilize before function deployment.
Connect directly to VPC networks
Functions that access Cloud SQL or internal APIs need VPC connectivity.
// Deploy a Cloud Functions (2nd gen) function with direct VPC egress.
import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";

const project = "my-project-name";

// Bucket that holds the zipped function source.
const bucket = new gcp.storage.Bucket("bucket", {
    name: `${project}-gcf-source`,
    location: "US",
    uniformBucketLevelAccess: true,
});

// Upload the packaged source code.
const object = new gcp.storage.BucketObject("object", {
    name: "function-source.zip",
    bucket: bucket.name,
    source: new pulumi.asset.FileAsset("function-source.zip"),
});

const _function = new gcp.cloudfunctionsv2.Function("function", {
    name: "function-v2",
    location: "us-central1",
    description: "a new function",
    buildConfig: {
        runtime: "nodejs20",
        entryPoint: "helloHttp",
        source: {
            storageSource: {
                bucket: bucket.name,
                object: object.name,
            },
        },
    },
    serviceConfig: {
        maxInstanceCount: 1,
        availableMemory: "256M",
        timeoutSeconds: 60,
        // Attach the function directly to the VPC subnet
        // (no Serverless VPC Access connector needed).
        directVpcNetworkInterfaces: [{
            network: "default",
            subnetwork: "default",
            tags: [
                "tag1",
                "tag2",
            ],
        }],
        directVpcEgress: "VPC_EGRESS_ALL_TRAFFIC",
    },
});
# Deploy a Cloud Functions (2nd gen) function with direct VPC egress.
import pulumi
import pulumi_gcp as gcp

project = "my-project-name"

# Bucket that holds the zipped function source.
bucket = gcp.storage.Bucket("bucket",
    name=f"{project}-gcf-source",
    location="US",
    uniform_bucket_level_access=True)

# Upload the packaged source code.
# NOTE: `object` shadows the builtin; kept to match the other examples.
object = gcp.storage.BucketObject("object",
    name="function-source.zip",
    bucket=bucket.name,
    source=pulumi.FileAsset("function-source.zip"))

function = gcp.cloudfunctionsv2.Function("function",
    name="function-v2",
    location="us-central1",
    description="a new function",
    build_config={
        "runtime": "nodejs20",
        "entry_point": "helloHttp",
        "source": {
            "storage_source": {
                "bucket": bucket.name,
                "object": object.name,
            },
        },
    },
    service_config={
        "max_instance_count": 1,
        "available_memory": "256M",
        "timeout_seconds": 60,
        # Attach the function directly to the VPC subnet
        # (no Serverless VPC Access connector needed).
        "direct_vpc_network_interfaces": [{
            "network": "default",
            "subnetwork": "default",
            "tags": [
                "tag1",
                "tag2",
            ],
        }],
        "direct_vpc_egress": "VPC_EGRESS_ALL_TRAFFIC",
    })
package main
import (
"github.com/pulumi/pulumi-gcp/sdk/v9/go/gcp/cloudfunctionsv2"
"github.com/pulumi/pulumi-gcp/sdk/v9/go/gcp/storage"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
project := "my-project-name"
bucket, err := storage.NewBucket(ctx, "bucket", &storage.BucketArgs{
Name: pulumi.Sprintf("%v-gcf-source", project),
Location: pulumi.String("US"),
UniformBucketLevelAccess: pulumi.Bool(true),
})
if err != nil {
return err
}
object, err := storage.NewBucketObject(ctx, "object", &storage.BucketObjectArgs{
Name: pulumi.String("function-source.zip"),
Bucket: bucket.Name,
Source: pulumi.NewFileAsset("function-source.zip"),
})
if err != nil {
return err
}
_, err = cloudfunctionsv2.NewFunction(ctx, "function", &cloudfunctionsv2.FunctionArgs{
Name: pulumi.String("function-v2"),
Location: pulumi.String("us-central1"),
Description: pulumi.String("a new function"),
BuildConfig: &cloudfunctionsv2.FunctionBuildConfigArgs{
Runtime: pulumi.String("nodejs20"),
EntryPoint: pulumi.String("helloHttp"),
Source: &cloudfunctionsv2.FunctionBuildConfigSourceArgs{
StorageSource: &cloudfunctionsv2.FunctionBuildConfigSourceStorageSourceArgs{
Bucket: bucket.Name,
Object: object.Name,
},
},
},
ServiceConfig: &cloudfunctionsv2.FunctionServiceConfigArgs{
MaxInstanceCount: pulumi.Int(1),
AvailableMemory: pulumi.String("256M"),
TimeoutSeconds: pulumi.Int(60),
DirectVpcNetworkInterfaces: cloudfunctionsv2.FunctionServiceConfigDirectVpcNetworkInterfaceArray{
&cloudfunctionsv2.FunctionServiceConfigDirectVpcNetworkInterfaceArgs{
Network: pulumi.String("default"),
Subnetwork: pulumi.String("default"),
Tags: pulumi.StringArray{
pulumi.String("tag1"),
pulumi.String("tag2"),
},
},
},
DirectVpcEgress: pulumi.String("VPC_EGRESS_ALL_TRAFFIC"),
},
})
if err != nil {
return err
}
return nil
})
}
// Deploy a Cloud Functions (2nd gen) function with direct VPC egress.
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;

return await Deployment.RunAsync(() =>
{
    var project = "my-project-name";

    // Bucket that holds the zipped function source.
    var bucket = new Gcp.Storage.Bucket("bucket", new()
    {
        Name = $"{project}-gcf-source",
        Location = "US",
        UniformBucketLevelAccess = true,
    });

    // Upload the packaged source code.
    var @object = new Gcp.Storage.BucketObject("object", new()
    {
        Name = "function-source.zip",
        Bucket = bucket.Name,
        Source = new FileAsset("function-source.zip"),
    });

    var function = new Gcp.CloudFunctionsV2.Function("function", new()
    {
        Name = "function-v2",
        Location = "us-central1",
        Description = "a new function",
        BuildConfig = new Gcp.CloudFunctionsV2.Inputs.FunctionBuildConfigArgs
        {
            Runtime = "nodejs20",
            EntryPoint = "helloHttp",
            Source = new Gcp.CloudFunctionsV2.Inputs.FunctionBuildConfigSourceArgs
            {
                StorageSource = new Gcp.CloudFunctionsV2.Inputs.FunctionBuildConfigSourceStorageSourceArgs
                {
                    Bucket = bucket.Name,
                    Object = @object.Name,
                },
            },
        },
        ServiceConfig = new Gcp.CloudFunctionsV2.Inputs.FunctionServiceConfigArgs
        {
            MaxInstanceCount = 1,
            AvailableMemory = "256M",
            TimeoutSeconds = 60,
            // Attach the function directly to the VPC subnet
            // (no Serverless VPC Access connector needed).
            DirectVpcNetworkInterfaces = new[]
            {
                new Gcp.CloudFunctionsV2.Inputs.FunctionServiceConfigDirectVpcNetworkInterfaceArgs
                {
                    Network = "default",
                    Subnetwork = "default",
                    Tags = new[]
                    {
                        "tag1",
                        "tag2",
                    },
                },
            },
            DirectVpcEgress = "VPC_EGRESS_ALL_TRAFFIC",
        },
    });
});
// Deploy a Cloud Functions (2nd gen) function with direct VPC egress.
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.storage.Bucket;
import com.pulumi.gcp.storage.BucketArgs;
import com.pulumi.gcp.storage.BucketObject;
import com.pulumi.gcp.storage.BucketObjectArgs;
import com.pulumi.gcp.cloudfunctionsv2.Function;
import com.pulumi.gcp.cloudfunctionsv2.FunctionArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionBuildConfigArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionBuildConfigSourceArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionBuildConfigSourceStorageSourceArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionServiceConfigArgs;
// FIX: this import was missing even though the type is used below.
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionServiceConfigDirectVpcNetworkInterfaceArgs;
import com.pulumi.asset.FileAsset;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        final var project = "my-project-name";
        // Bucket that holds the zipped function source.
        var bucket = new Bucket("bucket", BucketArgs.builder()
            .name(String.format("%s-gcf-source", project))
            .location("US")
            .uniformBucketLevelAccess(true)
            .build());

        // Upload the packaged source code.
        var object = new BucketObject("object", BucketObjectArgs.builder()
            .name("function-source.zip")
            .bucket(bucket.name())
            .source(new FileAsset("function-source.zip"))
            .build());

        var function = new Function("function", FunctionArgs.builder()
            .name("function-v2")
            .location("us-central1")
            .description("a new function")
            .buildConfig(FunctionBuildConfigArgs.builder()
                .runtime("nodejs20")
                .entryPoint("helloHttp")
                .source(FunctionBuildConfigSourceArgs.builder()
                    .storageSource(FunctionBuildConfigSourceStorageSourceArgs.builder()
                        .bucket(bucket.name())
                        .object(object.name())
                        .build())
                    .build())
                .build())
            .serviceConfig(FunctionServiceConfigArgs.builder()
                .maxInstanceCount(1)
                .availableMemory("256M")
                .timeoutSeconds(60)
                // Attach the function directly to the VPC subnet
                // (no Serverless VPC Access connector needed).
                .directVpcNetworkInterfaces(FunctionServiceConfigDirectVpcNetworkInterfaceArgs.builder()
                    .network("default")
                    .subnetwork("default")
                    .tags(
                        "tag1",
                        "tag2")
                    .build())
                .directVpcEgress("VPC_EGRESS_ALL_TRAFFIC")
                .build())
            .build());
    }
}
# Cloud Functions (2nd gen) function with direct VPC egress.
resources:
  # Bucket that holds the zipped function source.
  bucket:
    type: gcp:storage:Bucket
    properties:
      name: ${project}-gcf-source
      location: US
      uniformBucketLevelAccess: true
  # Upload the packaged source code.
  object:
    type: gcp:storage:BucketObject
    properties:
      name: function-source.zip
      bucket: ${bucket.name}
      source:
        fn::FileAsset: function-source.zip
  function:
    type: gcp:cloudfunctionsv2:Function
    properties:
      name: function-v2
      location: us-central1
      description: a new function
      buildConfig:
        runtime: nodejs20
        entryPoint: helloHttp
        source:
          storageSource:
            bucket: ${bucket.name}
            object: ${object.name}
      serviceConfig:
        maxInstanceCount: 1
        availableMemory: 256M
        timeoutSeconds: 60
        # Attach the function directly to the VPC subnet
        # (no Serverless VPC Access connector needed).
        directVpcNetworkInterfaces:
          - network: default
            subnetwork: default
            tags:
              - tag1
              - tag2
        directVpcEgress: VPC_EGRESS_ALL_TRAFFIC
variables:
  project: my-project-name
The directVpcNetworkInterfaces array places your function in specified VPC networks and subnetworks, enabling access to private resources. The network and subnetwork properties reference existing VPC infrastructure. The tags property applies network tags for firewall rules. The directVpcEgress setting controls whether all traffic or only private traffic routes through the VPC. Direct VPC networking provides lower latency than Serverless VPC Access Connectors.
Beyond these examples
These snippets focus on specific function-level features: HTTP and event-driven triggers, secret management (environment variables and volume mounts), and VPC networking and service accounts. They’re intentionally minimal rather than full serverless applications.
The examples rely on pre-existing infrastructure such as Cloud Storage buckets for source code and event triggers, Pub/Sub topics, Secret Manager secrets, VPC networks and subnetworks, and service accounts with appropriate IAM roles. They focus on configuring the function rather than provisioning everything around it.
To keep things focused, common function patterns are omitted, including:
- Custom build service accounts and Cloud Build worker pools
- Customer-managed encryption keys (CMEK)
- Automatic base image updates and update policies
- Audit log event triggers with path pattern filtering
- Cloud Scheduler integration for scheduled invocations
- Artifact Registry repositories for container images
These omissions are intentional: the goal is to illustrate how each function feature is wired, not provide drop-in serverless modules. See the Cloud Functions v2 Function resource reference for all available configuration options.
Let's deploy GCP Cloud Functions (2nd Gen)
Get started with Pulumi Cloud, then follow our quick setup guide to deploy this infrastructure.
Try Pulumi Cloud for FREE
Frequently Asked Questions
Configuration & Deployment
- The location, name, and project properties are immutable. To change them, you must destroy and recreate the function.
- Configure buildConfig.source.storageSource with the bucket name and object name to deploy from Cloud Storage.
- automaticUpdatePolicy enables automatic base image updates, while onDeployUpdatePolicy updates the base image only during deployments.
IAM & Permissions
- Commonly required roles include roles/run.invoker (for invocation), roles/eventarc.eventReceiver (for event triggers), and roles/artifactregistry.reader (for accessing container images).
- Grant the build service account its required roles (roles/logging.logWriter, roles/artifactregistry.writer, roles/storage.objectAdmin) before creating the function.
- The labels field is non-authoritative and only manages labels in your configuration. Use effectiveLabels to see all labels, including those added by other clients and services.
Event Triggers
- Cloud Storage triggers require the roles/pubsub.publisher IAM role on the GCS service account. Grant this role before creating the function with a GCS trigger.
- In eventTrigger.eventFilters, set operator to match-path-pattern and use wildcards in the value (e.g., /projects/_/buckets/bucket-name/objects/*.txt).
Secrets & Security
- Use secretEnvironmentVariables to expose secrets as environment variables, or use secretVolumes to mount secrets as files at a specified path.
- For customer-managed encryption, set kmsKeyName on the function and buildConfig.dockerRepository, then grant roles/cloudkms.cryptoKeyEncrypterDecrypter to multiple service accounts: gcf-admin-robot, gcp-sa-artifactregistry, gs-project-accounts, serverless-robot-prod, and the Eventarc service identity.
Networking & Advanced Features
- Configure serviceConfig.directVpcNetworkInterfaces with network, subnetwork, and optional tags, then set directVpcEgress to control egress traffic.
- Grant roles/cloudfunctions.invoker and roles/run.invoker to the service account that needs to invoke the function.
Using a different cloud?
Explore serverless guides for other cloud providers: