1. Packages
  2. Google Cloud (GCP) Classic
  3. API Docs
  4. logging
  5. ProjectSink
Viewing docs for Google Cloud v9.15.0
published on Thursday, Mar 12, 2026 by Pulumi
gcp logo
Viewing docs for Google Cloud v9.15.0
published on Thursday, Mar 12, 2026 by Pulumi

    Manages a project-level logging sink. For more information see:

    You can specify exclusions for log sinks created by this provider by using the exclusions field of gcp.logging.ProjectSink

    Note: You must have granted the “Logs Configuration Writer” IAM role (roles/logging.configWriter) to the credentials used with this provider.

    Note: You must enable the Cloud Resource Manager API.

    Note: The _Default and _Required logging sinks are automatically created for a given project and cannot be deleted. Creating a resource of this type will acquire and update the resource that already exists at the desired location. These sinks cannot be removed, so deleting this resource will remove the sink config from your state but will leave the logging sink unchanged. The sinks that are currently automatically created are "_Default" and "_Required".

    Example Usage

    Basic Sink

    import * as pulumi from "@pulumi/pulumi";
    import * as gcp from "@pulumi/gcp";
    
    const my_sink = new gcp.logging.ProjectSink("my-sink", {
        name: "my-pubsub-instance-sink",
        destination: "pubsub.googleapis.com/projects/my-project/topics/instance-activity",
        filter: "resource.type = gce_instance AND severity >= WARNING",
        uniqueWriterIdentity: true,
    });
    
    import pulumi
    import pulumi_gcp as gcp
    
    my_sink = gcp.logging.ProjectSink("my-sink",
        name="my-pubsub-instance-sink",
        destination="pubsub.googleapis.com/projects/my-project/topics/instance-activity",
        filter="resource.type = gce_instance AND severity >= WARNING",
        unique_writer_identity=True)
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-gcp/sdk/v9/go/gcp/logging"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		_, err := logging.NewProjectSink(ctx, "my-sink", &logging.ProjectSinkArgs{
    			Name:                 pulumi.String("my-pubsub-instance-sink"),
    			Destination:          pulumi.String("pubsub.googleapis.com/projects/my-project/topics/instance-activity"),
    			Filter:               pulumi.String("resource.type = gce_instance AND severity >= WARNING"),
    			UniqueWriterIdentity: pulumi.Bool(true),
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Gcp = Pulumi.Gcp;
    
    return await Deployment.RunAsync(() => 
    {
        var my_sink = new Gcp.Logging.ProjectSink("my-sink", new()
        {
            Name = "my-pubsub-instance-sink",
            Destination = "pubsub.googleapis.com/projects/my-project/topics/instance-activity",
            Filter = "resource.type = gce_instance AND severity >= WARNING",
            UniqueWriterIdentity = true,
        });
    
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.gcp.logging.ProjectSink;
    import com.pulumi.gcp.logging.ProjectSinkArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var my_sink = new ProjectSink("my-sink", ProjectSinkArgs.builder()
                .name("my-pubsub-instance-sink")
                .destination("pubsub.googleapis.com/projects/my-project/topics/instance-activity")
                .filter("resource.type = gce_instance AND severity >= WARNING")
                .uniqueWriterIdentity(true)
                .build());
    
        }
    }
    
    resources:
      my-sink:
        type: gcp:logging:ProjectSink
        properties:
          name: my-pubsub-instance-sink
          destination: pubsub.googleapis.com/projects/my-project/topics/instance-activity
          filter: resource.type = gce_instance AND severity >= WARNING
          uniqueWriterIdentity: true
    

    Cloud Storage Bucket Destination

    A more complete example follows: this creates a compute instance, as well as a log sink that logs all activity to a cloud storage bucket. Because we are using unique_writer_identity, we must grant it access to the bucket.

    Note that this grant requires the "Project IAM Admin" IAM role (roles/resourcemanager.projectIamAdmin) granted to the credentials used with this provider.

    import * as pulumi from "@pulumi/pulumi";
    import * as gcp from "@pulumi/gcp";
    
    // Our logged compute instance
    const my_logged_instance = new gcp.compute.Instance("my-logged-instance", {
        networkInterfaces: [{
            accessConfigs: [{}],
            network: "default",
        }],
        name: "my-instance",
        machineType: "e2-medium",
        zone: "us-central1-a",
        bootDisk: {
            initializeParams: {
                image: "debian-cloud/debian-11",
            },
        },
    });
    // A gcs bucket to store logs in
    const gcs_bucket = new gcp.storage.Bucket("gcs-bucket", {
        name: "my-unique-logging-bucket",
        location: "US",
    });
    // Our sink; this logs all activity related to our "my-logged-instance" instance
    const instance_sink = new gcp.logging.ProjectSink("instance-sink", {
        name: "my-instance-sink",
        description: "some explanation on what this is",
        destination: pulumi.interpolate`storage.googleapis.com/${gcs_bucket.name}`,
        filter: pulumi.interpolate`resource.type = gce_instance AND resource.labels.instance_id = "${my_logged_instance.instanceId}"`,
        uniqueWriterIdentity: true,
    });
    // Because our sink uses a unique_writer, we must grant that writer access to the bucket.
    const gcs_bucket_writer = new gcp.projects.IAMBinding("gcs-bucket-writer", {
        project: "your-project-id",
        role: "roles/storage.objectCreator",
        members: [instance_sink.writerIdentity],
    });
    
    import pulumi
    import pulumi_gcp as gcp
    
    # Our logged compute instance
    my_logged_instance = gcp.compute.Instance("my-logged-instance",
        network_interfaces=[{
            "access_configs": [{}],
            "network": "default",
        }],
        name="my-instance",
        machine_type="e2-medium",
        zone="us-central1-a",
        boot_disk={
            "initialize_params": {
                "image": "debian-cloud/debian-11",
            },
        })
    # A gcs bucket to store logs in
    gcs_bucket = gcp.storage.Bucket("gcs-bucket",
        name="my-unique-logging-bucket",
        location="US")
    # Our sink; this logs all activity related to our "my-logged-instance" instance
    instance_sink = gcp.logging.ProjectSink("instance-sink",
        name="my-instance-sink",
        description="some explanation on what this is",
        destination=gcs_bucket.name.apply(lambda name: f"storage.googleapis.com/{name}"),
        filter=my_logged_instance.instance_id.apply(lambda instance_id: f"resource.type = gce_instance AND resource.labels.instance_id = \"{instance_id}\""),
        unique_writer_identity=True)
    # Because our sink uses a unique_writer, we must grant that writer access to the bucket.
    gcs_bucket_writer = gcp.projects.IAMBinding("gcs-bucket-writer",
        project="your-project-id",
        role="roles/storage.objectCreator",
        members=[instance_sink.writer_identity])
    
    package main
    
    import (
    	"fmt"
    
    	"github.com/pulumi/pulumi-gcp/sdk/v9/go/gcp/compute"
    	"github.com/pulumi/pulumi-gcp/sdk/v9/go/gcp/logging"
    	"github.com/pulumi/pulumi-gcp/sdk/v9/go/gcp/projects"
    	"github.com/pulumi/pulumi-gcp/sdk/v9/go/gcp/storage"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		// Our logged compute instance
    		my_logged_instance, err := compute.NewInstance(ctx, "my-logged-instance", &compute.InstanceArgs{
    			NetworkInterfaces: compute.InstanceNetworkInterfaceArray{
    				&compute.InstanceNetworkInterfaceArgs{
    					AccessConfigs: compute.InstanceNetworkInterfaceAccessConfigArray{
    						&compute.InstanceNetworkInterfaceAccessConfigArgs{},
    					},
    					Network: pulumi.String("default"),
    				},
    			},
    			Name:        pulumi.String("my-instance"),
    			MachineType: pulumi.String("e2-medium"),
    			Zone:        pulumi.String("us-central1-a"),
    			BootDisk: &compute.InstanceBootDiskArgs{
    				InitializeParams: &compute.InstanceBootDiskInitializeParamsArgs{
    					Image: pulumi.String("debian-cloud/debian-11"),
    				},
    			},
    		})
    		if err != nil {
    			return err
    		}
    		// A gcs bucket to store logs in
    		gcs_bucket, err := storage.NewBucket(ctx, "gcs-bucket", &storage.BucketArgs{
    			Name:     pulumi.String("my-unique-logging-bucket"),
    			Location: pulumi.String("US"),
    		})
    		if err != nil {
    			return err
    		}
    		// Our sink; this logs all activity related to our "my-logged-instance" instance
    		instance_sink, err := logging.NewProjectSink(ctx, "instance-sink", &logging.ProjectSinkArgs{
    			Name:        pulumi.String("my-instance-sink"),
    			Description: pulumi.String("some explanation on what this is"),
    			Destination: gcs_bucket.Name.ApplyT(func(name string) (string, error) {
    				return fmt.Sprintf("storage.googleapis.com/%v", name), nil
    			}).(pulumi.StringOutput),
    			Filter: my_logged_instance.InstanceId.ApplyT(func(instanceId string) (string, error) {
    				return fmt.Sprintf("resource.type = gce_instance AND resource.labels.instance_id = \"%v\"", instanceId), nil
    			}).(pulumi.StringOutput),
    			UniqueWriterIdentity: pulumi.Bool(true),
    		})
    		if err != nil {
    			return err
    		}
    		// Because our sink uses a unique_writer, we must grant that writer access to the bucket.
    		_, err = projects.NewIAMBinding(ctx, "gcs-bucket-writer", &projects.IAMBindingArgs{
    			Project: pulumi.String("your-project-id"),
    			Role:    pulumi.String("roles/storage.objectCreator"),
    			Members: pulumi.StringArray{
    				instance_sink.WriterIdentity,
    			},
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Gcp = Pulumi.Gcp;
    
    return await Deployment.RunAsync(() => 
    {
        // Our logged compute instance
        var my_logged_instance = new Gcp.Compute.Instance("my-logged-instance", new()
        {
            NetworkInterfaces = new[]
            {
                new Gcp.Compute.Inputs.InstanceNetworkInterfaceArgs
                {
                    AccessConfigs = new[]
                    {
                        null,
                    },
                    Network = "default",
                },
            },
            Name = "my-instance",
            MachineType = "e2-medium",
            Zone = "us-central1-a",
            BootDisk = new Gcp.Compute.Inputs.InstanceBootDiskArgs
            {
                InitializeParams = new Gcp.Compute.Inputs.InstanceBootDiskInitializeParamsArgs
                {
                    Image = "debian-cloud/debian-11",
                },
            },
        });
    
        // A gcs bucket to store logs in
        var gcs_bucket = new Gcp.Storage.Bucket("gcs-bucket", new()
        {
            Name = "my-unique-logging-bucket",
            Location = "US",
        });
    
        // Our sink; this logs all activity related to our "my-logged-instance" instance
        var instance_sink = new Gcp.Logging.ProjectSink("instance-sink", new()
        {
            Name = "my-instance-sink",
            Description = "some explanation on what this is",
            Destination = gcs_bucket.Name.Apply(name => $"storage.googleapis.com/{name}"),
            Filter = my_logged_instance.InstanceId.Apply(instanceId => $"resource.type = gce_instance AND resource.labels.instance_id = \"{instanceId}\""),
            UniqueWriterIdentity = true,
        });
    
        // Because our sink uses a unique_writer, we must grant that writer access to the bucket.
        var gcs_bucket_writer = new Gcp.Projects.IAMBinding("gcs-bucket-writer", new()
        {
            Project = "your-project-id",
            Role = "roles/storage.objectCreator",
            Members = new[]
            {
                instance_sink.WriterIdentity,
            },
        });
    
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.gcp.compute.Instance;
    import com.pulumi.gcp.compute.InstanceArgs;
    import com.pulumi.gcp.compute.inputs.InstanceNetworkInterfaceArgs;
    import com.pulumi.gcp.compute.inputs.InstanceBootDiskArgs;
    import com.pulumi.gcp.compute.inputs.InstanceBootDiskInitializeParamsArgs;
    import com.pulumi.gcp.storage.Bucket;
    import com.pulumi.gcp.storage.BucketArgs;
    import com.pulumi.gcp.logging.ProjectSink;
    import com.pulumi.gcp.logging.ProjectSinkArgs;
    import com.pulumi.gcp.projects.IAMBinding;
    import com.pulumi.gcp.projects.IAMBindingArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            // Our logged compute instance
            var my_logged_instance = new Instance("my-logged-instance", InstanceArgs.builder()
                .networkInterfaces(InstanceNetworkInterfaceArgs.builder()
                    .accessConfigs(InstanceNetworkInterfaceAccessConfigArgs.builder()
                        .build())
                    .network("default")
                    .build())
                .name("my-instance")
                .machineType("e2-medium")
                .zone("us-central1-a")
                .bootDisk(InstanceBootDiskArgs.builder()
                    .initializeParams(InstanceBootDiskInitializeParamsArgs.builder()
                        .image("debian-cloud/debian-11")
                        .build())
                    .build())
                .build());
    
            // A gcs bucket to store logs in
            var gcs_bucket = new Bucket("gcs-bucket", BucketArgs.builder()
                .name("my-unique-logging-bucket")
                .location("US")
                .build());
    
            // Our sink; this logs all activity related to our "my-logged-instance" instance
            var instance_sink = new ProjectSink("instance-sink", ProjectSinkArgs.builder()
                .name("my-instance-sink")
                .description("some explanation on what this is")
                .destination(gcs_bucket.name().applyValue(_name -> String.format("storage.googleapis.com/%s", _name)))
                .filter(my_logged_instance.instanceId().applyValue(_instanceId -> String.format("resource.type = gce_instance AND resource.labels.instance_id = \"%s\"", _instanceId)))
                .uniqueWriterIdentity(true)
                .build());
    
            // Because our sink uses a unique_writer, we must grant that writer access to the bucket.
            var gcs_bucket_writer = new IAMBinding("gcs-bucket-writer", IAMBindingArgs.builder()
                .project("your-project-id")
                .role("roles/storage.objectCreator")
                .members(instance_sink.writerIdentity())
                .build());
    
        }
    }
    
    resources:
      # Our logged compute instance
      my-logged-instance:
        type: gcp:compute:Instance
        properties:
          networkInterfaces:
            - accessConfigs:
                - {}
              network: default
          name: my-instance
          machineType: e2-medium
          zone: us-central1-a
          bootDisk:
            initializeParams:
              image: debian-cloud/debian-11
      # A gcs bucket to store logs in
      gcs-bucket:
        type: gcp:storage:Bucket
        properties:
          name: my-unique-logging-bucket
          location: US
      # Our sink; this logs all activity related to our "my-logged-instance" instance
      instance-sink:
        type: gcp:logging:ProjectSink
        properties:
          name: my-instance-sink
          description: some explanation on what this is
          destination: storage.googleapis.com/${["gcs-bucket"].name}
          filter: resource.type = gce_instance AND resource.labels.instance_id = "${["my-logged-instance"].instanceId}"
          uniqueWriterIdentity: true
      # Because our sink uses a unique_writer, we must grant that writer access to the bucket.
      gcs-bucket-writer:
        type: gcp:projects:IAMBinding
        properties:
          project: your-project-id
          role: roles/storage.objectCreator
          members:
            - ${["instance-sink"].writerIdentity}
    

    User-Managed Service Account

    The following example creates a sink that is configured with a user-managed service account, by specifying the custom_writer_identity field.

    Note that you can only create a sink that uses a user-managed service account when the sink destination is a log bucket.

    import * as pulumi from "@pulumi/pulumi";
    import * as gcp from "@pulumi/gcp";
    
    const custom_sa = new gcp.serviceaccount.Account("custom-sa", {
        project: "other-project-id",
        accountId: "gce-log-bucket-sink",
        displayName: "gce-log-bucket-sink",
    });
    // Create a sink that uses user-managed service account
    const my_sink = new gcp.logging.ProjectSink("my-sink", {
        name: "other-project-log-bucket-sink",
        destination: "logging.googleapis.com/projects/other-project-id/locations/global/buckets/gce-logs",
        filter: "resource.type = gce_instance AND severity >= WARNING",
        uniqueWriterIdentity: true,
        customWriterIdentity: custom_sa.email,
    });
    // grant writer access to the user-managed service account
    const custom_sa_logbucket_binding = new gcp.projects.IAMMember("custom-sa-logbucket-binding", {
        project: "destination-project-id",
        role: "roles/logging.bucketWriter",
        member: pulumi.interpolate`serviceAccount:${custom_sa.email}`,
    });
    
    import pulumi
    import pulumi_gcp as gcp
    
    custom_sa = gcp.serviceaccount.Account("custom-sa",
        project="other-project-id",
        account_id="gce-log-bucket-sink",
        display_name="gce-log-bucket-sink")
    # Create a sink that uses user-managed service account
    my_sink = gcp.logging.ProjectSink("my-sink",
        name="other-project-log-bucket-sink",
        destination="logging.googleapis.com/projects/other-project-id/locations/global/buckets/gce-logs",
        filter="resource.type = gce_instance AND severity >= WARNING",
        unique_writer_identity=True,
        custom_writer_identity=custom_sa.email)
    # grant writer access to the user-managed service account
    custom_sa_logbucket_binding = gcp.projects.IAMMember("custom-sa-logbucket-binding",
        project="destination-project-id",
        role="roles/logging.bucketWriter",
        member=custom_sa.email.apply(lambda email: f"serviceAccount:{email}"))
    
    package main
    
    import (
    	"fmt"
    
    	"github.com/pulumi/pulumi-gcp/sdk/v9/go/gcp/logging"
    	"github.com/pulumi/pulumi-gcp/sdk/v9/go/gcp/projects"
    	"github.com/pulumi/pulumi-gcp/sdk/v9/go/gcp/serviceaccount"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		custom_sa, err := serviceaccount.NewAccount(ctx, "custom-sa", &serviceaccount.AccountArgs{
    			Project:     pulumi.String("other-project-id"),
    			AccountId:   pulumi.String("gce-log-bucket-sink"),
    			DisplayName: pulumi.String("gce-log-bucket-sink"),
    		})
    		if err != nil {
    			return err
    		}
    		// Create a sink that uses user-managed service account
    		_, err = logging.NewProjectSink(ctx, "my-sink", &logging.ProjectSinkArgs{
    			Name:                 pulumi.String("other-project-log-bucket-sink"),
    			Destination:          pulumi.String("logging.googleapis.com/projects/other-project-id/locations/global/buckets/gce-logs"),
    			Filter:               pulumi.String("resource.type = gce_instance AND severity >= WARNING"),
    			UniqueWriterIdentity: pulumi.Bool(true),
    			CustomWriterIdentity: custom_sa.Email,
    		})
    		if err != nil {
    			return err
    		}
    		// grant writer access to the user-managed service account
    		_, err = projects.NewIAMMember(ctx, "custom-sa-logbucket-binding", &projects.IAMMemberArgs{
    			Project: pulumi.String("destination-project-id"),
    			Role:    pulumi.String("roles/logging.bucketWriter"),
    			Member: custom_sa.Email.ApplyT(func(email string) (string, error) {
    				return fmt.Sprintf("serviceAccount:%v", email), nil
    			}).(pulumi.StringOutput),
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Gcp = Pulumi.Gcp;
    
    return await Deployment.RunAsync(() => 
    {
        var custom_sa = new Gcp.ServiceAccount.Account("custom-sa", new()
        {
            Project = "other-project-id",
            AccountId = "gce-log-bucket-sink",
            DisplayName = "gce-log-bucket-sink",
        });
    
        // Create a sink that uses user-managed service account
        var my_sink = new Gcp.Logging.ProjectSink("my-sink", new()
        {
            Name = "other-project-log-bucket-sink",
            Destination = "logging.googleapis.com/projects/other-project-id/locations/global/buckets/gce-logs",
            Filter = "resource.type = gce_instance AND severity >= WARNING",
            UniqueWriterIdentity = true,
            CustomWriterIdentity = custom_sa.Email,
        });
    
        // grant writer access to the user-managed service account
        var custom_sa_logbucket_binding = new Gcp.Projects.IAMMember("custom-sa-logbucket-binding", new()
        {
            Project = "destination-project-id",
            Role = "roles/logging.bucketWriter",
            Member = custom_sa.Email.Apply(email => $"serviceAccount:{email}"),
        });
    
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.gcp.serviceaccount.Account;
    import com.pulumi.gcp.serviceaccount.AccountArgs;
    import com.pulumi.gcp.logging.ProjectSink;
    import com.pulumi.gcp.logging.ProjectSinkArgs;
    import com.pulumi.gcp.projects.IAMMember;
    import com.pulumi.gcp.projects.IAMMemberArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var custom_sa = new Account("custom-sa", AccountArgs.builder()
                .project("other-project-id")
                .accountId("gce-log-bucket-sink")
                .displayName("gce-log-bucket-sink")
                .build());
    
            // Create a sink that uses user-managed service account
            var my_sink = new ProjectSink("my-sink", ProjectSinkArgs.builder()
                .name("other-project-log-bucket-sink")
                .destination("logging.googleapis.com/projects/other-project-id/locations/global/buckets/gce-logs")
                .filter("resource.type = gce_instance AND severity >= WARNING")
                .uniqueWriterIdentity(true)
                .customWriterIdentity(custom_sa.email())
                .build());
    
            // grant writer access to the user-managed service account
            var custom_sa_logbucket_binding = new IAMMember("custom-sa-logbucket-binding", IAMMemberArgs.builder()
                .project("destination-project-id")
                .role("roles/logging.bucketWriter")
                .member(custom_sa.email().applyValue(_email -> String.format("serviceAccount:%s", _email)))
                .build());
    
        }
    }
    
    resources:
      custom-sa:
        type: gcp:serviceaccount:Account
        properties:
          project: other-project-id
          accountId: gce-log-bucket-sink
          displayName: gce-log-bucket-sink
      # Create a sink that uses user-managed service account
      my-sink:
        type: gcp:logging:ProjectSink
        properties:
          name: other-project-log-bucket-sink
          destination: logging.googleapis.com/projects/other-project-id/locations/global/buckets/gce-logs
          filter: resource.type = gce_instance AND severity >= WARNING
          uniqueWriterIdentity: true # Use a user-managed service account
          customWriterIdentity: ${["custom-sa"].email}
      # grant writer access to the user-managed service account
      custom-sa-logbucket-binding:
        type: gcp:projects:IAMMember
        properties:
          project: destination-project-id
          role: roles/logging.bucketWriter
          member: serviceAccount:${["custom-sa"].email}
    

    The above example will create a log sink that routes logs to the destination GCP project using a user-managed service account.

    Sink Exclusions

    The following example uses exclusions to filter out logs that will not be exported. In this example, logs are exported to a log bucket and there are two exclusions configured.

    import * as pulumi from "@pulumi/pulumi";
    import * as gcp from "@pulumi/gcp";
    
    const log_bucket = new gcp.logging.ProjectSink("log-bucket", {
        name: "my-logging-sink",
        destination: "logging.googleapis.com/projects/my-project/locations/global/buckets/_Default",
        exclusions: [
            {
                name: "nsexcllusion1",
                description: "Exclude logs from namespace-1 in k8s",
                filter: "resource.type = k8s_container resource.labels.namespace_name=\"namespace-1\" ",
            },
            {
                name: "nsexcllusion2",
                description: "Exclude logs from namespace-2 in k8s",
                filter: "resource.type = k8s_container resource.labels.namespace_name=\"namespace-2\" ",
            },
        ],
        uniqueWriterIdentity: true,
    });
    
    import pulumi
    import pulumi_gcp as gcp
    
    log_bucket = gcp.logging.ProjectSink("log-bucket",
        name="my-logging-sink",
        destination="logging.googleapis.com/projects/my-project/locations/global/buckets/_Default",
        exclusions=[
            {
                "name": "nsexcllusion1",
                "description": "Exclude logs from namespace-1 in k8s",
                "filter": "resource.type = k8s_container resource.labels.namespace_name=\"namespace-1\" ",
            },
            {
                "name": "nsexcllusion2",
                "description": "Exclude logs from namespace-2 in k8s",
                "filter": "resource.type = k8s_container resource.labels.namespace_name=\"namespace-2\" ",
            },
        ],
        unique_writer_identity=True)
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-gcp/sdk/v9/go/gcp/logging"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		_, err := logging.NewProjectSink(ctx, "log-bucket", &logging.ProjectSinkArgs{
    			Name:        pulumi.String("my-logging-sink"),
    			Destination: pulumi.String("logging.googleapis.com/projects/my-project/locations/global/buckets/_Default"),
    			Exclusions: logging.ProjectSinkExclusionArray{
    				&logging.ProjectSinkExclusionArgs{
    					Name:        pulumi.String("nsexcllusion1"),
    					Description: pulumi.String("Exclude logs from namespace-1 in k8s"),
    					Filter:      pulumi.String("resource.type = k8s_container resource.labels.namespace_name=\"namespace-1\" "),
    				},
    				&logging.ProjectSinkExclusionArgs{
    					Name:        pulumi.String("nsexcllusion2"),
    					Description: pulumi.String("Exclude logs from namespace-2 in k8s"),
    					Filter:      pulumi.String("resource.type = k8s_container resource.labels.namespace_name=\"namespace-2\" "),
    				},
    			},
    			UniqueWriterIdentity: pulumi.Bool(true),
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Gcp = Pulumi.Gcp;
    
    return await Deployment.RunAsync(() => 
    {
        var log_bucket = new Gcp.Logging.ProjectSink("log-bucket", new()
        {
            Name = "my-logging-sink",
            Destination = "logging.googleapis.com/projects/my-project/locations/global/buckets/_Default",
            Exclusions = new[]
            {
                new Gcp.Logging.Inputs.ProjectSinkExclusionArgs
                {
                    Name = "nsexcllusion1",
                    Description = "Exclude logs from namespace-1 in k8s",
                    Filter = "resource.type = k8s_container resource.labels.namespace_name=\"namespace-1\" ",
                },
                new Gcp.Logging.Inputs.ProjectSinkExclusionArgs
                {
                    Name = "nsexcllusion2",
                    Description = "Exclude logs from namespace-2 in k8s",
                    Filter = "resource.type = k8s_container resource.labels.namespace_name=\"namespace-2\" ",
                },
            },
            UniqueWriterIdentity = true,
        });
    
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.gcp.logging.ProjectSink;
    import com.pulumi.gcp.logging.ProjectSinkArgs;
    import com.pulumi.gcp.logging.inputs.ProjectSinkExclusionArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    // Example: route project logs to the project's _Default Cloud Logging
    // bucket while excluding container logs from two Kubernetes namespaces.
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }

        public static void stack(Context ctx) {
            // Sink that writes to the _Default logging bucket of "my-project".
            var log_bucket = new ProjectSink("log-bucket", ProjectSinkArgs.builder()
                .name("my-logging-sink")
                .destination("logging.googleapis.com/projects/my-project/locations/global/buckets/_Default")
                // Entries matching either exclusion filter are dropped before
                // they reach the destination.
                .exclusions(            
                    ProjectSinkExclusionArgs.builder()
                        .name("nsexcllusion1")
                        .description("Exclude logs from namespace-1 in k8s")
                        .filter("resource.type = k8s_container resource.labels.namespace_name=\"namespace-1\" ")
                        .build(),
                    ProjectSinkExclusionArgs.builder()
                        .name("nsexcllusion2")
                        .description("Exclude logs from namespace-2 in k8s")
                        .filter("resource.type = k8s_container resource.labels.namespace_name=\"namespace-2\" ")
                        .build())
                // true => a unique service account is created and used as the
                // sink's writer identity (see uniqueWriterIdentity docs below).
                .uniqueWriterIdentity(true)
                .build());

        }
    }
    
    # Example: sink routing project logs to the _Default logging bucket while
    # excluding container logs from namespaces "namespace-1" and "namespace-2".
    resources:
      log-bucket:
        type: gcp:logging:ProjectSink
        properties:
          name: my-logging-sink
          destination: logging.googleapis.com/projects/my-project/locations/global/buckets/_Default
          # Entries matching either exclusion filter are not exported.
          exclusions:
            - name: nsexcllusion1
              description: Exclude logs from namespace-1 in k8s
              filter: 'resource.type = k8s_container resource.labels.namespace_name="namespace-1" '
            - name: nsexcllusion2
              description: Exclude logs from namespace-2 in k8s
              filter: 'resource.type = k8s_container resource.labels.namespace_name="namespace-2" '
          # true => a unique service account is created and used as the
          # sink's writer identity (see uniqueWriterIdentity docs below).
          uniqueWriterIdentity: true
    

    Create ProjectSink Resource

    Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.

    Constructor syntax

    new ProjectSink(name: string, args: ProjectSinkArgs, opts?: CustomResourceOptions);
    @overload
    def ProjectSink(resource_name: str,
                    args: ProjectSinkArgs,
                    opts: Optional[ResourceOptions] = None)
    
    @overload
    def ProjectSink(resource_name: str,
                    opts: Optional[ResourceOptions] = None,
                    destination: Optional[str] = None,
                    bigquery_options: Optional[ProjectSinkBigqueryOptionsArgs] = None,
                    custom_writer_identity: Optional[str] = None,
                    description: Optional[str] = None,
                    disabled: Optional[bool] = None,
                    exclusions: Optional[Sequence[ProjectSinkExclusionArgs]] = None,
                    filter: Optional[str] = None,
                    name: Optional[str] = None,
                    project: Optional[str] = None,
                    unique_writer_identity: Optional[bool] = None)
    func NewProjectSink(ctx *Context, name string, args ProjectSinkArgs, opts ...ResourceOption) (*ProjectSink, error)
    public ProjectSink(string name, ProjectSinkArgs args, CustomResourceOptions? opts = null)
    public ProjectSink(String name, ProjectSinkArgs args)
    public ProjectSink(String name, ProjectSinkArgs args, CustomResourceOptions options)
    
    type: gcp:logging:ProjectSink
    properties: # The arguments to resource properties.
    options: # Bag of options to control resource's behavior.
    
    

    Parameters

    name string
    The unique name of the resource.
    args ProjectSinkArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    resource_name str
    The unique name of the resource.
    args ProjectSinkArgs
    The arguments to resource properties.
    opts ResourceOptions
    Bag of options to control resource's behavior.
    ctx Context
    Context object for the current deployment.
    name string
    The unique name of the resource.
    args ProjectSinkArgs
    The arguments to resource properties.
    opts ResourceOption
    Bag of options to control resource's behavior.
    name string
    The unique name of the resource.
    args ProjectSinkArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    name String
    The unique name of the resource.
    args ProjectSinkArgs
    The arguments to resource properties.
    options CustomResourceOptions
    Bag of options to control resource's behavior.

    Constructor example

    The following reference example uses placeholder values for all input properties.

    // Reference example (C#): every ProjectSink input with a placeholder value.
    var projectSinkResource = new Gcp.Logging.ProjectSink("projectSinkResource", new()
    {
        Destination = "string",
        BigqueryOptions = new Gcp.Logging.Inputs.ProjectSinkBigqueryOptionsArgs
        {
            UsePartitionedTables = false,
        },
        CustomWriterIdentity = "string",
        Description = "string",
        Disabled = false,
        Exclusions = new[]
        {
            new Gcp.Logging.Inputs.ProjectSinkExclusionArgs
            {
                Filter = "string",
                Name = "string",
                Description = "string",
                Disabled = false,
            },
        },
        Filter = "string",
        Name = "string",
        Project = "string",
        UniqueWriterIdentity = false,
    });
    
    // Reference example (Go): every ProjectSink input with a placeholder value.
    example, err := logging.NewProjectSink(ctx, "projectSinkResource", &logging.ProjectSinkArgs{
    	Destination: pulumi.String("string"),
    	BigqueryOptions: &logging.ProjectSinkBigqueryOptionsArgs{
    		UsePartitionedTables: pulumi.Bool(false),
    	},
    	CustomWriterIdentity: pulumi.String("string"),
    	Description:          pulumi.String("string"),
    	Disabled:             pulumi.Bool(false),
    	Exclusions: logging.ProjectSinkExclusionArray{
    		&logging.ProjectSinkExclusionArgs{
    			Filter:      pulumi.String("string"),
    			Name:        pulumi.String("string"),
    			Description: pulumi.String("string"),
    			Disabled:    pulumi.Bool(false),
    		},
    	},
    	Filter:               pulumi.String("string"),
    	Name:                 pulumi.String("string"),
    	Project:              pulumi.String("string"),
    	UniqueWriterIdentity: pulumi.Bool(false),
    })
    
    // Reference example (Java): every ProjectSink input with a placeholder value.
    var projectSinkResource = new ProjectSink("projectSinkResource", ProjectSinkArgs.builder()
        .destination("string")
        .bigqueryOptions(ProjectSinkBigqueryOptionsArgs.builder()
            .usePartitionedTables(false)
            .build())
        .customWriterIdentity("string")
        .description("string")
        .disabled(false)
        .exclusions(ProjectSinkExclusionArgs.builder()
            .filter("string")
            .name("string")
            .description("string")
            .disabled(false)
            .build())
        .filter("string")
        .name("string")
        .project("string")
        .uniqueWriterIdentity(false)
        .build());
    
    # Reference example (Python): every ProjectSink input with a placeholder
    # value; nested inputs are passed as dictionary literals here.
    project_sink_resource = gcp.logging.ProjectSink("projectSinkResource",
        destination="string",
        bigquery_options={
            "use_partitioned_tables": False,
        },
        custom_writer_identity="string",
        description="string",
        disabled=False,
        exclusions=[{
            "filter": "string",
            "name": "string",
            "description": "string",
            "disabled": False,
        }],
        filter="string",
        name="string",
        project="string",
        unique_writer_identity=False)
    
    // Reference example (TypeScript): every ProjectSink input with a placeholder value.
    const projectSinkResource = new gcp.logging.ProjectSink("projectSinkResource", {
        destination: "string",
        bigqueryOptions: {
            usePartitionedTables: false,
        },
        customWriterIdentity: "string",
        description: "string",
        disabled: false,
        exclusions: [{
            filter: "string",
            name: "string",
            description: "string",
            disabled: false,
        }],
        filter: "string",
        name: "string",
        project: "string",
        uniqueWriterIdentity: false,
    });
    
    # Reference example (YAML): every ProjectSink input with a placeholder value.
    type: gcp:logging:ProjectSink
    properties:
        bigqueryOptions:
            usePartitionedTables: false
        customWriterIdentity: string
        description: string
        destination: string
        disabled: false
        exclusions:
            - description: string
              disabled: false
              filter: string
              name: string
        filter: string
        name: string
        project: string
        uniqueWriterIdentity: false
    

    ProjectSink Resource Properties

    To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

    Inputs

    In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.

    The ProjectSink resource accepts the following input properties:

    Destination string

    The destination of the sink (or, in other words, where logs are written to). Can be a Cloud Storage bucket, a PubSub topic, a BigQuery dataset, a Cloud Logging bucket, or a Google Cloud project. Examples:

    • storage.googleapis.com/[GCS_BUCKET]
    • bigquery.googleapis.com/projects/[PROJECT_ID]/datasets/[DATASET]
    • pubsub.googleapis.com/projects/[PROJECT_ID]/topics/[TOPIC_ID]
    • logging.googleapis.com/projects/[PROJECT_ID]/locations/global/buckets/[BUCKET_ID]
    • logging.googleapis.com/projects/[PROJECT_ID]

    The writer associated with the sink must have access to write to the above resource.

    BigqueryOptions ProjectSinkBigqueryOptions
    Options that affect sinks exporting data to BigQuery. Structure documented below.
    CustomWriterIdentity string
    A user managed service account that will be used to write the log entries. The format must be serviceAccount:some@email. This field can only be specified if you are routing logs to a destination outside this sink's project. If not specified, a Logging service account will automatically be generated.
    Description string
    A description of this sink. The maximum length of the description is 8000 characters.
    Disabled bool
    If set to True, then this sink is disabled and it does not export any log entries.
    Exclusions List<ProjectSinkExclusion>
    Log entries that match any of the exclusion filters will not be exported. If a log entry is matched by both filter and one of exclusions.filter, it will not be exported. Can be repeated multiple times for multiple exclusions. Structure is documented below.
    Filter string
    The filter to apply when exporting logs. Only log entries that match the filter are exported. See Advanced Log Filters for information on how to write a filter.
    Name string
    The name of the logging sink. Logging automatically creates two sinks: _Required and _Default.
    Project string
    The ID of the project to create the sink in. If omitted, the project associated with the provider is used.
    UniqueWriterIdentity bool
    Whether or not to create a unique identity associated with this sink. If false, then the writer_identity used is serviceAccount:cloud-logs@system.gserviceaccount.com. If true (the default), then a unique service account is created and used for this sink. If you wish to publish logs across projects or utilize bigquery_options, you must set unique_writer_identity to true.
    Destination string

    The destination of the sink (or, in other words, where logs are written to). Can be a Cloud Storage bucket, a PubSub topic, a BigQuery dataset, a Cloud Logging bucket, or a Google Cloud project. Examples:

    • storage.googleapis.com/[GCS_BUCKET]
    • bigquery.googleapis.com/projects/[PROJECT_ID]/datasets/[DATASET]
    • pubsub.googleapis.com/projects/[PROJECT_ID]/topics/[TOPIC_ID]
    • logging.googleapis.com/projects/[PROJECT_ID]/locations/global/buckets/[BUCKET_ID]
    • logging.googleapis.com/projects/[PROJECT_ID]

    The writer associated with the sink must have access to write to the above resource.

    BigqueryOptions ProjectSinkBigqueryOptionsArgs
    Options that affect sinks exporting data to BigQuery. Structure documented below.
    CustomWriterIdentity string
    A user managed service account that will be used to write the log entries. The format must be serviceAccount:some@email. This field can only be specified if you are routing logs to a destination outside this sink's project. If not specified, a Logging service account will automatically be generated.
    Description string
    A description of this sink. The maximum length of the description is 8000 characters.
    Disabled bool
    If set to True, then this sink is disabled and it does not export any log entries.
    Exclusions []ProjectSinkExclusionArgs
    Log entries that match any of the exclusion filters will not be exported. If a log entry is matched by both filter and one of exclusions.filter, it will not be exported. Can be repeated multiple times for multiple exclusions. Structure is documented below.
    Filter string
    The filter to apply when exporting logs. Only log entries that match the filter are exported. See Advanced Log Filters for information on how to write a filter.
    Name string
    The name of the logging sink. Logging automatically creates two sinks: _Required and _Default.
    Project string
    The ID of the project to create the sink in. If omitted, the project associated with the provider is used.
    UniqueWriterIdentity bool
    Whether or not to create a unique identity associated with this sink. If false, then the writer_identity used is serviceAccount:cloud-logs@system.gserviceaccount.com. If true (the default), then a unique service account is created and used for this sink. If you wish to publish logs across projects or utilize bigquery_options, you must set unique_writer_identity to true.
    destination String

    The destination of the sink (or, in other words, where logs are written to). Can be a Cloud Storage bucket, a PubSub topic, a BigQuery dataset, a Cloud Logging bucket, or a Google Cloud project. Examples:

    • storage.googleapis.com/[GCS_BUCKET]
    • bigquery.googleapis.com/projects/[PROJECT_ID]/datasets/[DATASET]
    • pubsub.googleapis.com/projects/[PROJECT_ID]/topics/[TOPIC_ID]
    • logging.googleapis.com/projects/[PROJECT_ID]/locations/global/buckets/[BUCKET_ID]
    • logging.googleapis.com/projects/[PROJECT_ID]

    The writer associated with the sink must have access to write to the above resource.

    bigqueryOptions ProjectSinkBigqueryOptions
    Options that affect sinks exporting data to BigQuery. Structure documented below.
    customWriterIdentity String
    A user managed service account that will be used to write the log entries. The format must be serviceAccount:some@email. This field can only be specified if you are routing logs to a destination outside this sink's project. If not specified, a Logging service account will automatically be generated.
    description String
    A description of this sink. The maximum length of the description is 8000 characters.
    disabled Boolean
    If set to True, then this sink is disabled and it does not export any log entries.
    exclusions List<ProjectSinkExclusion>
    Log entries that match any of the exclusion filters will not be exported. If a log entry is matched by both filter and one of exclusions.filter, it will not be exported. Can be repeated multiple times for multiple exclusions. Structure is documented below.
    filter String
    The filter to apply when exporting logs. Only log entries that match the filter are exported. See Advanced Log Filters for information on how to write a filter.
    name String
    The name of the logging sink. Logging automatically creates two sinks: _Required and _Default.
    project String
    The ID of the project to create the sink in. If omitted, the project associated with the provider is used.
    uniqueWriterIdentity Boolean
    Whether or not to create a unique identity associated with this sink. If false, then the writer_identity used is serviceAccount:cloud-logs@system.gserviceaccount.com. If true (the default), then a unique service account is created and used for this sink. If you wish to publish logs across projects or utilize bigquery_options, you must set unique_writer_identity to true.
    destination string

    The destination of the sink (or, in other words, where logs are written to). Can be a Cloud Storage bucket, a PubSub topic, a BigQuery dataset, a Cloud Logging bucket, or a Google Cloud project. Examples:

    • storage.googleapis.com/[GCS_BUCKET]
    • bigquery.googleapis.com/projects/[PROJECT_ID]/datasets/[DATASET]
    • pubsub.googleapis.com/projects/[PROJECT_ID]/topics/[TOPIC_ID]
    • logging.googleapis.com/projects/[PROJECT_ID]/locations/global/buckets/[BUCKET_ID]
    • logging.googleapis.com/projects/[PROJECT_ID]

    The writer associated with the sink must have access to write to the above resource.

    bigqueryOptions ProjectSinkBigqueryOptions
    Options that affect sinks exporting data to BigQuery. Structure documented below.
    customWriterIdentity string
    A user managed service account that will be used to write the log entries. The format must be serviceAccount:some@email. This field can only be specified if you are routing logs to a destination outside this sink's project. If not specified, a Logging service account will automatically be generated.
    description string
    A description of this sink. The maximum length of the description is 8000 characters.
    disabled boolean
    If set to True, then this sink is disabled and it does not export any log entries.
    exclusions ProjectSinkExclusion[]
    Log entries that match any of the exclusion filters will not be exported. If a log entry is matched by both filter and one of exclusions.filter, it will not be exported. Can be repeated multiple times for multiple exclusions. Structure is documented below.
    filter string
    The filter to apply when exporting logs. Only log entries that match the filter are exported. See Advanced Log Filters for information on how to write a filter.
    name string
    The name of the logging sink. Logging automatically creates two sinks: _Required and _Default.
    project string
    The ID of the project to create the sink in. If omitted, the project associated with the provider is used.
    uniqueWriterIdentity boolean
    Whether or not to create a unique identity associated with this sink. If false, then the writer_identity used is serviceAccount:cloud-logs@system.gserviceaccount.com. If true (the default), then a unique service account is created and used for this sink. If you wish to publish logs across projects or utilize bigquery_options, you must set unique_writer_identity to true.
    destination str

    The destination of the sink (or, in other words, where logs are written to). Can be a Cloud Storage bucket, a PubSub topic, a BigQuery dataset, a Cloud Logging bucket, or a Google Cloud project. Examples:

    • storage.googleapis.com/[GCS_BUCKET]
    • bigquery.googleapis.com/projects/[PROJECT_ID]/datasets/[DATASET]
    • pubsub.googleapis.com/projects/[PROJECT_ID]/topics/[TOPIC_ID]
    • logging.googleapis.com/projects/[PROJECT_ID]/locations/global/buckets/[BUCKET_ID]
    • logging.googleapis.com/projects/[PROJECT_ID]

    The writer associated with the sink must have access to write to the above resource.

    bigquery_options ProjectSinkBigqueryOptionsArgs
    Options that affect sinks exporting data to BigQuery. Structure documented below.
    custom_writer_identity str
    A user managed service account that will be used to write the log entries. The format must be serviceAccount:some@email. This field can only be specified if you are routing logs to a destination outside this sink's project. If not specified, a Logging service account will automatically be generated.
    description str
    A description of this sink. The maximum length of the description is 8000 characters.
    disabled bool
    If set to True, then this sink is disabled and it does not export any log entries.
    exclusions Sequence[ProjectSinkExclusionArgs]
    Log entries that match any of the exclusion filters will not be exported. If a log entry is matched by both filter and one of exclusions.filter, it will not be exported. Can be repeated multiple times for multiple exclusions. Structure is documented below.
    filter str
    The filter to apply when exporting logs. Only log entries that match the filter are exported. See Advanced Log Filters for information on how to write a filter.
    name str
    The name of the logging sink. Logging automatically creates two sinks: _Required and _Default.
    project str
    The ID of the project to create the sink in. If omitted, the project associated with the provider is used.
    unique_writer_identity bool
    Whether or not to create a unique identity associated with this sink. If false, then the writer_identity used is serviceAccount:cloud-logs@system.gserviceaccount.com. If true (the default), then a unique service account is created and used for this sink. If you wish to publish logs across projects or utilize bigquery_options, you must set unique_writer_identity to true.
    destination String

    The destination of the sink (or, in other words, where logs are written to). Can be a Cloud Storage bucket, a PubSub topic, a BigQuery dataset, a Cloud Logging bucket, or a Google Cloud project. Examples:

    • storage.googleapis.com/[GCS_BUCKET]
    • bigquery.googleapis.com/projects/[PROJECT_ID]/datasets/[DATASET]
    • pubsub.googleapis.com/projects/[PROJECT_ID]/topics/[TOPIC_ID]
    • logging.googleapis.com/projects/[PROJECT_ID]/locations/global/buckets/[BUCKET_ID]
    • logging.googleapis.com/projects/[PROJECT_ID]

    The writer associated with the sink must have access to write to the above resource.

    bigqueryOptions Property Map
    Options that affect sinks exporting data to BigQuery. Structure documented below.
    customWriterIdentity String
    A user managed service account that will be used to write the log entries. The format must be serviceAccount:some@email. This field can only be specified if you are routing logs to a destination outside this sink's project. If not specified, a Logging service account will automatically be generated.
    description String
    A description of this sink. The maximum length of the description is 8000 characters.
    disabled Boolean
    If set to True, then this sink is disabled and it does not export any log entries.
    exclusions List<Property Map>
    Log entries that match any of the exclusion filters will not be exported. If a log entry is matched by both filter and one of exclusions.filter, it will not be exported. Can be repeated multiple times for multiple exclusions. Structure is documented below.
    filter String
    The filter to apply when exporting logs. Only log entries that match the filter are exported. See Advanced Log Filters for information on how to write a filter.
    name String
    The name of the logging sink. Logging automatically creates two sinks: _Required and _Default.
    project String
    The ID of the project to create the sink in. If omitted, the project associated with the provider is used.
    uniqueWriterIdentity Boolean
    Whether or not to create a unique identity associated with this sink. If false, then the writer_identity used is serviceAccount:cloud-logs@system.gserviceaccount.com. If true (the default), then a unique service account is created and used for this sink. If you wish to publish logs across projects or utilize bigquery_options, you must set unique_writer_identity to true.

    Outputs

    All input properties are implicitly available as output properties. Additionally, the ProjectSink resource produces the following output properties:

    Id string
    The provider-assigned unique ID for this managed resource.
    WriterIdentity string
    The identity associated with this sink. This identity must be granted write access to the configured destination.
    Id string
    The provider-assigned unique ID for this managed resource.
    WriterIdentity string
    The identity associated with this sink. This identity must be granted write access to the configured destination.
    id String
    The provider-assigned unique ID for this managed resource.
    writerIdentity String
    The identity associated with this sink. This identity must be granted write access to the configured destination.
    id string
    The provider-assigned unique ID for this managed resource.
    writerIdentity string
    The identity associated with this sink. This identity must be granted write access to the configured destination.
    id str
    The provider-assigned unique ID for this managed resource.
    writer_identity str
    The identity associated with this sink. This identity must be granted write access to the configured destination.
    id String
    The provider-assigned unique ID for this managed resource.
    writerIdentity String
    The identity associated with this sink. This identity must be granted write access to the configured destination.

    Look up Existing ProjectSink Resource

    Get an existing ProjectSink resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

    public static get(name: string, id: Input<ID>, state?: ProjectSinkState, opts?: CustomResourceOptions): ProjectSink
    @staticmethod
    def get(resource_name: str,
            id: str,
            opts: Optional[ResourceOptions] = None,
            bigquery_options: Optional[ProjectSinkBigqueryOptionsArgs] = None,
            custom_writer_identity: Optional[str] = None,
            description: Optional[str] = None,
            destination: Optional[str] = None,
            disabled: Optional[bool] = None,
            exclusions: Optional[Sequence[ProjectSinkExclusionArgs]] = None,
            filter: Optional[str] = None,
            name: Optional[str] = None,
            project: Optional[str] = None,
            unique_writer_identity: Optional[bool] = None,
            writer_identity: Optional[str] = None) -> ProjectSink
    func GetProjectSink(ctx *Context, name string, id IDInput, state *ProjectSinkState, opts ...ResourceOption) (*ProjectSink, error)
    public static ProjectSink Get(string name, Input<string> id, ProjectSinkState? state, CustomResourceOptions? opts = null)
    public static ProjectSink get(String name, Output<String> id, ProjectSinkState state, CustomResourceOptions options)
    resources:
      _:
        type: gcp:logging:ProjectSink
        get:
          id: ${id}
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    resource_name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    The following state arguments are supported:
    BigqueryOptions ProjectSinkBigqueryOptions
    Options that affect sinks exporting data to BigQuery. Structure documented below.
    CustomWriterIdentity string
    A user managed service account that will be used to write the log entries. The format must be serviceAccount:some@email. This field can only be specified if you are routing logs to a destination outside this sink's project. If not specified, a Logging service account will automatically be generated.
    Description string
    A description of this sink. The maximum length of the description is 8000 characters.
    Destination string

    The destination of the sink (or, in other words, where logs are written to). Can be a Cloud Storage bucket, a PubSub topic, a BigQuery dataset, a Cloud Logging bucket, or a Google Cloud project. Examples:

    • storage.googleapis.com/[GCS_BUCKET]
    • bigquery.googleapis.com/projects/[PROJECT_ID]/datasets/[DATASET]
    • pubsub.googleapis.com/projects/[PROJECT_ID]/topics/[TOPIC_ID]
    • logging.googleapis.com/projects/[PROJECT_ID]/locations/global/buckets/[BUCKET_ID]
    • logging.googleapis.com/projects/[PROJECT_ID]

    The writer associated with the sink must have access to write to the above resource.

    Disabled bool
    If set to True, then this sink is disabled and it does not export any log entries.
    Exclusions List<ProjectSinkExclusion>
    Log entries that match any of the exclusion filters will not be exported. If a log entry is matched by both filter and one of exclusions.filter, it will not be exported. Can be repeated multiple times for multiple exclusions. Structure is documented below.
    Filter string
    The filter to apply when exporting logs. Only log entries that match the filter are exported. See Advanced Log Filters for information on how to write a filter.
    Name string
    The name of the logging sink. Logging automatically creates two sinks: _Required and _Default.
    Project string
    The ID of the project to create the sink in. If omitted, the project associated with the provider is used.
    UniqueWriterIdentity bool
    Whether or not to create a unique identity associated with this sink. If false, then the writer_identity used is serviceAccount:cloud-logs@system.gserviceaccount.com. If true (the default), then a unique service account is created and used for this sink. If you wish to publish logs across projects or utilize bigquery_options, you must set unique_writer_identity to true.
    WriterIdentity string
    The identity associated with this sink. This identity must be granted write access to the configured destination.
    BigqueryOptions ProjectSinkBigqueryOptionsArgs
    Options that affect sinks exporting data to BigQuery. Structure documented below.
    CustomWriterIdentity string
    A user managed service account that will be used to write the log entries. The format must be serviceAccount:some@email. This field can only be specified if you are routing logs to a destination outside this sink's project. If not specified, a Logging service account will automatically be generated.
    Description string
    A description of this sink. The maximum length of the description is 8000 characters.
    Destination string

    The destination of the sink (or, in other words, where logs are written to). Can be a Cloud Storage bucket, a PubSub topic, a BigQuery dataset, a Cloud Logging bucket, or a Google Cloud project. Examples:

    • storage.googleapis.com/[GCS_BUCKET]
    • bigquery.googleapis.com/projects/[PROJECT_ID]/datasets/[DATASET]
    • pubsub.googleapis.com/projects/[PROJECT_ID]/topics/[TOPIC_ID]
    • logging.googleapis.com/projects/[PROJECT_ID]/locations/global/buckets/[BUCKET_ID]
    • logging.googleapis.com/projects/[PROJECT_ID]

    The writer associated with the sink must have access to write to the above resource.

    Disabled bool
    If set to True, then this sink is disabled and it does not export any log entries.
    Exclusions []ProjectSinkExclusionArgs
    Log entries that match any of the exclusion filters will not be exported. If a log entry is matched by both filter and one of exclusions.filter, it will not be exported. Can be repeated multiple times for multiple exclusions. Structure is documented below.
    Filter string
    The filter to apply when exporting logs. Only log entries that match the filter are exported. See Advanced Log Filters for information on how to write a filter.
    Name string
    The name of the logging sink. Logging automatically creates two sinks: _Required and _Default.
    Project string
    The ID of the project to create the sink in. If omitted, the project associated with the provider is used.
    UniqueWriterIdentity bool
    Whether or not to create a unique identity associated with this sink. If false, then the writer_identity used is serviceAccount:cloud-logs@system.gserviceaccount.com. If true (the default), then a unique service account is created and used for this sink. If you wish to publish logs across projects or utilize bigquery_options, you must set unique_writer_identity to true.
    WriterIdentity string
    The identity associated with this sink. This identity must be granted write access to the configured destination.
    bigqueryOptions ProjectSinkBigqueryOptions
    Options that affect sinks exporting data to BigQuery. Structure documented below.
    customWriterIdentity String
    A user managed service account that will be used to write the log entries. The format must be serviceAccount:some@email. This field can only be specified if you are routing logs to a destination outside this sink's project. If not specified, a Logging service account will automatically be generated.
    description String
    A description of this sink. The maximum length of the description is 8000 characters.
    destination String

    The destination of the sink (or, in other words, where logs are written to). Can be a Cloud Storage bucket, a PubSub topic, a BigQuery dataset, a Cloud Logging bucket, or a Google Cloud project. Examples:

    • storage.googleapis.com/[GCS_BUCKET]
    • bigquery.googleapis.com/projects/[PROJECT_ID]/datasets/[DATASET]
    • pubsub.googleapis.com/projects/[PROJECT_ID]/topics/[TOPIC_ID]
    • logging.googleapis.com/projects/[PROJECT_ID]/locations/global/buckets/[BUCKET_ID]
    • logging.googleapis.com/projects/[PROJECT_ID]

    The writer associated with the sink must have access to write to the above resource.

    disabled Boolean
    If set to True, then this sink is disabled and it does not export any log entries.
    exclusions List<ProjectSinkExclusion>
    Log entries that match any of the exclusion filters will not be exported. If a log entry is matched by both filter and one of exclusions.filter, it will not be exported. Can be repeated multiple times for multiple exclusions. Structure is documented below.
    filter String
    The filter to apply when exporting logs. Only log entries that match the filter are exported. See Advanced Log Filters for information on how to write a filter.
    name String
    The name of the logging sink. Logging automatically creates two sinks: _Required and _Default.
    project String
    The ID of the project to create the sink in. If omitted, the project associated with the provider is used.
    uniqueWriterIdentity Boolean
    Whether or not to create a unique identity associated with this sink. If false, then the writer_identity used is serviceAccount:cloud-logs@system.gserviceaccount.com. If true (the default), then a unique service account is created and used for this sink. If you wish to publish logs across projects or utilize bigquery_options, you must set unique_writer_identity to true.
    writerIdentity String
    The identity associated with this sink. This identity must be granted write access to the configured destination.
    bigqueryOptions ProjectSinkBigqueryOptions
    Options that affect sinks exporting data to BigQuery. Structure documented below.
    customWriterIdentity string
    A user managed service account that will be used to write the log entries. The format must be serviceAccount:some@email. This field can only be specified if you are routing logs to a destination outside this sink's project. If not specified, a Logging service account will automatically be generated.
    description string
    A description of this sink. The maximum length of the description is 8000 characters.
    destination string

    The destination of the sink (or, in other words, where logs are written to). Can be a Cloud Storage bucket, a PubSub topic, a BigQuery dataset, a Cloud Logging bucket, or a Google Cloud project. Examples:

    • storage.googleapis.com/[GCS_BUCKET]
    • bigquery.googleapis.com/projects/[PROJECT_ID]/datasets/[DATASET]
    • pubsub.googleapis.com/projects/[PROJECT_ID]/topics/[TOPIC_ID]
    • logging.googleapis.com/projects/[PROJECT_ID]/locations/global/buckets/[BUCKET_ID]
    • logging.googleapis.com/projects/[PROJECT_ID]

    The writer associated with the sink must have access to write to the above resource.

    disabled boolean
    If set to True, then this sink is disabled and it does not export any log entries.
    exclusions ProjectSinkExclusion[]
    Log entries that match any of the exclusion filters will not be exported. If a log entry is matched by both filter and one of exclusions.filter, it will not be exported. Can be repeated multiple times for multiple exclusions. Structure is documented below.
    filter string
    The filter to apply when exporting logs. Only log entries that match the filter are exported. See Advanced Log Filters for information on how to write a filter.
    name string
    The name of the logging sink. Logging automatically creates two sinks: _Required and _Default.
    project string
    The ID of the project to create the sink in. If omitted, the project associated with the provider is used.
    uniqueWriterIdentity boolean
    Whether or not to create a unique identity associated with this sink. If false, then the writer_identity used is serviceAccount:cloud-logs@system.gserviceaccount.com. If true (the default), then a unique service account is created and used for this sink. If you wish to publish logs across projects or utilize bigquery_options, you must set unique_writer_identity to true.
    writerIdentity string
    The identity associated with this sink. This identity must be granted write access to the configured destination.
    bigquery_options ProjectSinkBigqueryOptionsArgs
    Options that affect sinks exporting data to BigQuery. Structure documented below.
    custom_writer_identity str
    A user managed service account that will be used to write the log entries. The format must be serviceAccount:some@email. This field can only be specified if you are routing logs to a destination outside this sink's project. If not specified, a Logging service account will automatically be generated.
    description str
    A description of this sink. The maximum length of the description is 8000 characters.
    destination str

    The destination of the sink (or, in other words, where logs are written to). Can be a Cloud Storage bucket, a PubSub topic, a BigQuery dataset, a Cloud Logging bucket, or a Google Cloud project. Examples:

    • storage.googleapis.com/[GCS_BUCKET]
    • bigquery.googleapis.com/projects/[PROJECT_ID]/datasets/[DATASET]
    • pubsub.googleapis.com/projects/[PROJECT_ID]/topics/[TOPIC_ID]
    • logging.googleapis.com/projects/[PROJECT_ID]/locations/global/buckets/[BUCKET_ID]
    • logging.googleapis.com/projects/[PROJECT_ID]

    The writer associated with the sink must have access to write to the above resource.

    disabled bool
    If set to True, then this sink is disabled and it does not export any log entries.
    exclusions Sequence[ProjectSinkExclusionArgs]
    Log entries that match any of the exclusion filters will not be exported. If a log entry is matched by both filter and one of exclusions.filter, it will not be exported. Can be repeated multiple times for multiple exclusions. Structure is documented below.
    filter str
    The filter to apply when exporting logs. Only log entries that match the filter are exported. See Advanced Log Filters for information on how to write a filter.
    name str
    The name of the logging sink. Logging automatically creates two sinks: _Required and _Default.
    project str
    The ID of the project to create the sink in. If omitted, the project associated with the provider is used.
    unique_writer_identity bool
    Whether or not to create a unique identity associated with this sink. If false, then the writer_identity used is serviceAccount:cloud-logs@system.gserviceaccount.com. If true (the default), then a unique service account is created and used for this sink. If you wish to publish logs across projects or utilize bigquery_options, you must set unique_writer_identity to true.
    writer_identity str
    The identity associated with this sink. This identity must be granted write access to the configured destination.
    bigqueryOptions Property Map
    Options that affect sinks exporting data to BigQuery. Structure documented below.
    customWriterIdentity String
    A user managed service account that will be used to write the log entries. The format must be serviceAccount:some@email. This field can only be specified if you are routing logs to a destination outside this sink's project. If not specified, a Logging service account will automatically be generated.
    description String
    A description of this sink. The maximum length of the description is 8000 characters.
    destination String

    The destination of the sink (or, in other words, where logs are written to). Can be a Cloud Storage bucket, a PubSub topic, a BigQuery dataset, a Cloud Logging bucket, or a Google Cloud project. Examples:

    • storage.googleapis.com/[GCS_BUCKET]
    • bigquery.googleapis.com/projects/[PROJECT_ID]/datasets/[DATASET]
    • pubsub.googleapis.com/projects/[PROJECT_ID]/topics/[TOPIC_ID]
    • logging.googleapis.com/projects/[PROJECT_ID]/locations/global/buckets/[BUCKET_ID]
    • logging.googleapis.com/projects/[PROJECT_ID]

    The writer associated with the sink must have access to write to the above resource.

    disabled Boolean
    If set to True, then this sink is disabled and it does not export any log entries.
    exclusions List<Property Map>
    Log entries that match any of the exclusion filters will not be exported. If a log entry is matched by both filter and one of exclusions.filter, it will not be exported. Can be repeated multiple times for multiple exclusions. Structure is documented below.
    filter String
    The filter to apply when exporting logs. Only log entries that match the filter are exported. See Advanced Log Filters for information on how to write a filter.
    name String
    The name of the logging sink. Logging automatically creates two sinks: _Required and _Default.
    project String
    The ID of the project to create the sink in. If omitted, the project associated with the provider is used.
    uniqueWriterIdentity Boolean
    Whether or not to create a unique identity associated with this sink. If false, then the writer_identity used is serviceAccount:cloud-logs@system.gserviceaccount.com. If true (the default), then a unique service account is created and used for this sink. If you wish to publish logs across projects or utilize bigquery_options, you must set unique_writer_identity to true.
    writerIdentity String
    The identity associated with this sink. This identity must be granted write access to the configured destination.

    Supporting Types

    ProjectSinkBigqueryOptions, ProjectSinkBigqueryOptionsArgs

    UsePartitionedTables bool
    Whether to use BigQuery's partition tables. By default, Logging creates dated tables based on the log entries' timestamps, e.g. syslog_20170523. With partitioned tables the date suffix is no longer present and special query syntax has to be used instead. In both cases, tables are sharded based on UTC timezone.
    UsePartitionedTables bool
    Whether to use BigQuery's partition tables. By default, Logging creates dated tables based on the log entries' timestamps, e.g. syslog_20170523. With partitioned tables the date suffix is no longer present and special query syntax has to be used instead. In both cases, tables are sharded based on UTC timezone.
    usePartitionedTables Boolean
    Whether to use BigQuery's partition tables. By default, Logging creates dated tables based on the log entries' timestamps, e.g. syslog_20170523. With partitioned tables the date suffix is no longer present and special query syntax has to be used instead. In both cases, tables are sharded based on UTC timezone.
    usePartitionedTables boolean
    Whether to use BigQuery's partition tables. By default, Logging creates dated tables based on the log entries' timestamps, e.g. syslog_20170523. With partitioned tables the date suffix is no longer present and special query syntax has to be used instead. In both cases, tables are sharded based on UTC timezone.
    use_partitioned_tables bool
    Whether to use BigQuery's partition tables. By default, Logging creates dated tables based on the log entries' timestamps, e.g. syslog_20170523. With partitioned tables the date suffix is no longer present and special query syntax has to be used instead. In both cases, tables are sharded based on UTC timezone.
    usePartitionedTables Boolean
    Whether to use BigQuery's partition tables. By default, Logging creates dated tables based on the log entries' timestamps, e.g. syslog_20170523. With partitioned tables the date suffix is no longer present and special query syntax has to be used instead. In both cases, tables are sharded based on UTC timezone.

    ProjectSinkExclusion, ProjectSinkExclusionArgs

    Filter string
    An advanced logs filter that matches the log entries to be excluded. By using the sample function, you can exclude less than 100% of the matching log entries. See Advanced Log Filters for information on how to write a filter.
    Name string
    A client-assigned identifier, such as load-balancer-exclusion. Identifiers are limited to 100 characters and can include only letters, digits, underscores, hyphens, and periods. First character has to be alphanumeric.
    Description string
    A description of this exclusion.
    Disabled bool
    If set to True, then this exclusion is disabled and it does not exclude any log entries.
    Filter string
    An advanced logs filter that matches the log entries to be excluded. By using the sample function, you can exclude less than 100% of the matching log entries. See Advanced Log Filters for information on how to write a filter.
    Name string
    A client-assigned identifier, such as load-balancer-exclusion. Identifiers are limited to 100 characters and can include only letters, digits, underscores, hyphens, and periods. First character has to be alphanumeric.
    Description string
    A description of this exclusion.
    Disabled bool
    If set to True, then this exclusion is disabled and it does not exclude any log entries.
    filter String
    An advanced logs filter that matches the log entries to be excluded. By using the sample function, you can exclude less than 100% of the matching log entries. See Advanced Log Filters for information on how to write a filter.
    name String
    A client-assigned identifier, such as load-balancer-exclusion. Identifiers are limited to 100 characters and can include only letters, digits, underscores, hyphens, and periods. First character has to be alphanumeric.
    description String
    A description of this exclusion.
    disabled Boolean
    If set to True, then this exclusion is disabled and it does not exclude any log entries.
    filter string
    An advanced logs filter that matches the log entries to be excluded. By using the sample function, you can exclude less than 100% of the matching log entries. See Advanced Log Filters for information on how to write a filter.
    name string
    A client-assigned identifier, such as load-balancer-exclusion. Identifiers are limited to 100 characters and can include only letters, digits, underscores, hyphens, and periods. First character has to be alphanumeric.
    description string
    A description of this exclusion.
    disabled boolean
    If set to True, then this exclusion is disabled and it does not exclude any log entries.
    filter str
    An advanced logs filter that matches the log entries to be excluded. By using the sample function, you can exclude less than 100% of the matching log entries. See Advanced Log Filters for information on how to write a filter.
    name str
    A client-assigned identifier, such as load-balancer-exclusion. Identifiers are limited to 100 characters and can include only letters, digits, underscores, hyphens, and periods. First character has to be alphanumeric.
    description str
    A description of this exclusion.
    disabled bool
    If set to True, then this exclusion is disabled and it does not exclude any log entries.
    filter String
    An advanced logs filter that matches the log entries to be excluded. By using the sample function, you can exclude less than 100% of the matching log entries. See Advanced Log Filters for information on how to write a filter.
    name String
    A client-assigned identifier, such as load-balancer-exclusion. Identifiers are limited to 100 characters and can include only letters, digits, underscores, hyphens, and periods. First character has to be alphanumeric.
    description String
    A description of this exclusion.
    disabled Boolean
    If set to True, then this exclusion is disabled and it does not exclude any log entries.

    Import

    Project-level logging sinks can be imported using their URI, e.g.

    • projects/{{project_id}}/sinks/{{name}}

    When using the pulumi import command, project-level logging sinks can be imported using the format above. For example:

    $ pulumi import gcp:logging/projectSink:ProjectSink default projects/{{project_id}}/sinks/{{name}}
    

    To learn more about importing existing cloud resources, see Importing resources.

    Package Details

    Repository
    Google Cloud (GCP) Classic pulumi/pulumi-gcp
    License
    Apache-2.0
    Notes
    This Pulumi package is based on the google-beta Terraform Provider.
    gcp logo
    Viewing docs for Google Cloud v9.15.0
    published on Thursday, Mar 12, 2026 by Pulumi
      Try Pulumi Cloud free. Your team will thank you.